Dec 03 12:20:52 crc systemd[1]: Starting Kubernetes Kubelet... Dec 03 12:20:52 crc restorecon[4556]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 03 12:20:52 crc restorecon[4556]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:20:52 crc restorecon[4556]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:52 crc restorecon[4556]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:20:52 crc 
restorecon[4556]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 12:20:52 crc restorecon[4556]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:20:52 crc restorecon[4556]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:20:52 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 12:20:53 crc 
restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc 
restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc 
restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:20:53 
crc restorecon[4556]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 
12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 
12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 12:20:53 crc 
restorecon[4556]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 
12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 
12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc 
restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:20:53 crc restorecon[4556]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 12:20:53 crc restorecon[4556]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 03 12:20:53 crc kubenswrapper[4849]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 12:20:53 crc kubenswrapper[4849]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 03 12:20:53 crc kubenswrapper[4849]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 12:20:53 crc kubenswrapper[4849]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 03 12:20:53 crc kubenswrapper[4849]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 03 12:20:53 crc kubenswrapper[4849]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.727040 4849 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730458 4849 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730474 4849 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730479 4849 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730483 4849 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730487 4849 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730491 4849 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730494 4849 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730497 4849 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730501 4849 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730504 4849 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730508 4849 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730511 4849 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730514 4849 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730518 4849 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730521 4849 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730524 4849 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730527 4849 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730531 4849 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730534 4849 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730537 4849 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730540 4849 feature_gate.go:330] unrecognized feature gate: 
MachineAPIOperatorDisableMachineHealthCheckController Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730543 4849 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730546 4849 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730549 4849 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730552 4849 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730556 4849 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730564 4849 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730567 4849 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730571 4849 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730574 4849 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730577 4849 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730580 4849 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730583 4849 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730587 4849 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730591 4849 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730594 4849 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730598 4849 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730602 4849 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730606 4849 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730609 4849 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730612 4849 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730617 4849 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730622 4849 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730626 4849 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730629 4849 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730633 4849 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730637 4849 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730656 4849 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730661 4849 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730665 4849 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730669 4849 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730674 4849 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730678 4849 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730681 4849 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730684 4849 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730688 4849 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730691 4849 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730695 4849 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730700 4849 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730705 4849 feature_gate.go:330] unrecognized feature gate: Example Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730708 4849 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730712 4849 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730716 4849 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730719 4849 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730722 4849 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730726 4849 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730729 4849 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730733 4849 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730736 4849 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730739 4849 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.730742 4849 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730814 4849 flags.go:64] FLAG: --address="0.0.0.0" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730821 4849 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730828 4849 flags.go:64] FLAG: --anonymous-auth="true" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730833 4849 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730838 4849 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730843 4849 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730848 4849 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730853 4849 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730857 4849 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730861 4849 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730866 4849 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730870 4849 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730874 4849 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730877 4849 flags.go:64] FLAG: --cgroup-root="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730881 4849 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730885 4849 flags.go:64] FLAG: 
--client-ca-file="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730889 4849 flags.go:64] FLAG: --cloud-config="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730893 4849 flags.go:64] FLAG: --cloud-provider="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730897 4849 flags.go:64] FLAG: --cluster-dns="[]" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730904 4849 flags.go:64] FLAG: --cluster-domain="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730907 4849 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730911 4849 flags.go:64] FLAG: --config-dir="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730915 4849 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730919 4849 flags.go:64] FLAG: --container-log-max-files="5" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730924 4849 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730928 4849 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730932 4849 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730936 4849 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730940 4849 flags.go:64] FLAG: --contention-profiling="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730951 4849 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730955 4849 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730959 4849 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730963 4849 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730968 4849 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730971 4849 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730975 4849 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730979 4849 flags.go:64] FLAG: --enable-load-reader="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730982 4849 flags.go:64] FLAG: --enable-server="true" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730986 4849 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730991 4849 flags.go:64] FLAG: --event-burst="100" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.730995 4849 flags.go:64] FLAG: --event-qps="50" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731000 4849 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731004 4849 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731009 4849 flags.go:64] FLAG: --eviction-hard="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731013 4849 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731017 4849 flags.go:64] FLAG: 
--eviction-minimum-reclaim="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731021 4849 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731025 4849 flags.go:64] FLAG: --eviction-soft="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731028 4849 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731032 4849 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731036 4849 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731040 4849 flags.go:64] FLAG: --experimental-mounter-path="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731044 4849 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731048 4849 flags.go:64] FLAG: --fail-swap-on="true" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731052 4849 flags.go:64] FLAG: --feature-gates="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731057 4849 flags.go:64] FLAG: --file-check-frequency="20s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731060 4849 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731064 4849 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731068 4849 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731072 4849 flags.go:64] FLAG: --healthz-port="10248" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731076 4849 flags.go:64] FLAG: --help="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731081 4849 flags.go:64] FLAG: --hostname-override="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731084 4849 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731088 4849 flags.go:64] FLAG: --http-check-frequency="20s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731092 4849 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731095 4849 flags.go:64] FLAG: --image-credential-provider-config="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731099 4849 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731103 4849 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731106 4849 flags.go:64] FLAG: --image-service-endpoint="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731110 4849 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731114 4849 flags.go:64] FLAG: --kube-api-burst="100" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731117 4849 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731121 4849 flags.go:64] FLAG: --kube-api-qps="50" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731125 4849 flags.go:64] FLAG: --kube-reserved="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731128 4849 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731132 4849 flags.go:64] FLAG: 
--kubeconfig="/var/lib/kubelet/kubeconfig" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731136 4849 flags.go:64] FLAG: --kubelet-cgroups="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731139 4849 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731143 4849 flags.go:64] FLAG: --lock-file="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731147 4849 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731151 4849 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731155 4849 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731161 4849 flags.go:64] FLAG: --log-json-split-stream="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731165 4849 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731169 4849 flags.go:64] FLAG: --log-text-split-stream="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731172 4849 flags.go:64] FLAG: --logging-format="text" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731176 4849 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731180 4849 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731184 4849 flags.go:64] FLAG: --manifest-url="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731188 4849 flags.go:64] FLAG: --manifest-url-header="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731193 4849 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731197 4849 flags.go:64] FLAG: --max-open-files="1000000" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731201 4849 flags.go:64] FLAG: --max-pods="110" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731205 4849 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731209 4849 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731212 4849 flags.go:64] FLAG: --memory-manager-policy="None" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731216 4849 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731220 4849 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731224 4849 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731228 4849 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731236 4849 flags.go:64] FLAG: --node-status-max-images="50" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731240 4849 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731244 4849 flags.go:64] FLAG: --oom-score-adj="-999" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731248 4849 flags.go:64] FLAG: --pod-cidr="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731252 4849 flags.go:64] FLAG: 
--pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731258 4849 flags.go:64] FLAG: --pod-manifest-path="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731262 4849 flags.go:64] FLAG: --pod-max-pids="-1" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731266 4849 flags.go:64] FLAG: --pods-per-core="0" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731270 4849 flags.go:64] FLAG: --port="10250" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731274 4849 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731278 4849 flags.go:64] FLAG: --provider-id="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731281 4849 flags.go:64] FLAG: --qos-reserved="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731285 4849 flags.go:64] FLAG: --read-only-port="10255" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731289 4849 flags.go:64] FLAG: --register-node="true" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731293 4849 flags.go:64] FLAG: --register-schedulable="true" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731297 4849 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731304 4849 flags.go:64] FLAG: --registry-burst="10" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731308 4849 flags.go:64] FLAG: --registry-qps="5" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731312 4849 flags.go:64] FLAG: --reserved-cpus="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731315 4849 flags.go:64] FLAG: --reserved-memory="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731320 4849 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731324 4849 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731328 4849 flags.go:64] FLAG: --rotate-certificates="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731331 4849 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731335 4849 flags.go:64] FLAG: --runonce="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731339 4849 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731343 4849 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731347 4849 flags.go:64] FLAG: --seccomp-default="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731351 4849 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731355 4849 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731359 4849 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731363 4849 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731367 4849 flags.go:64] FLAG: --storage-driver-password="root" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731371 4849 flags.go:64] FLAG: --storage-driver-secure="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 
12:20:53.731374 4849 flags.go:64] FLAG: --storage-driver-table="stats" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731378 4849 flags.go:64] FLAG: --storage-driver-user="root" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731382 4849 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731386 4849 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731390 4849 flags.go:64] FLAG: --system-cgroups="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731394 4849 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731399 4849 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731403 4849 flags.go:64] FLAG: --tls-cert-file="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731412 4849 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731416 4849 flags.go:64] FLAG: --tls-min-version="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731420 4849 flags.go:64] FLAG: --tls-private-key-file="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731424 4849 flags.go:64] FLAG: --topology-manager-policy="none" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731428 4849 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731432 4849 flags.go:64] FLAG: --topology-manager-scope="container" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731435 4849 flags.go:64] FLAG: --v="2" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731440 4849 flags.go:64] FLAG: --version="false" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731445 4849 flags.go:64] FLAG: --vmodule="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731451 4849 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.731455 4849 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731549 4849 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731554 4849 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731558 4849 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731561 4849 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731565 4849 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731568 4849 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731571 4849 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731574 4849 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731578 4849 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731581 4849 feature_gate.go:330] unrecognized feature gate: 
ChunkSizeMiB Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731584 4849 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731587 4849 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731590 4849 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731593 4849 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731596 4849 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731600 4849 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731604 4849 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731608 4849 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731614 4849 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731618 4849 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731622 4849 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731629 4849 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731633 4849 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731636 4849 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731658 4849 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731662 4849 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731665 4849 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731668 4849 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731672 4849 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731675 4849 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731678 4849 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731682 4849 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731685 4849 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731688 4849 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731692 4849 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731696 4849 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731699 4849 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731702 4849 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731706 4849 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731709 4849 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731713 4849 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731716 4849 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731719 4849 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731722 4849 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731726 4849 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731729 4849 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731732 4849 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731735 4849 feature_gate.go:330] unrecognized feature gate: 
VSphereControlPlaneMachineSet Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731738 4849 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731741 4849 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731744 4849 feature_gate.go:330] unrecognized feature gate: Example Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731747 4849 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731751 4849 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731755 4849 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731759 4849 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731763 4849 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731768 4849 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731772 4849 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731775 4849 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731779 4849 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731782 4849 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731785 4849 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731788 4849 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731791 4849 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731795 4849 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731798 4849 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731801 4849 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731805 4849 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731808 4849 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731811 4849 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.731815 4849 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.732188 4849 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false 
ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.737583 4849 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.737614 4849 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737692 4849 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737706 4849 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737710 4849 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737714 4849 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737717 4849 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737720 4849 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737725 4849 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737728 4849 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737733 4849 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737740 4849 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737744 4849 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737748 4849 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737752 4849 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737755 4849 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737759 4849 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737762 4849 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737765 4849 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737771 4849 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737775 4849 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737778 4849 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737781 4849 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737784 4849 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737787 4849 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737791 4849 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737794 4849 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737797 4849 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737800 4849 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737803 4849 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737807 4849 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737810 4849 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737813 4849 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737816 4849 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737819 4849 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737822 4849 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737826 4849 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737829 4849 feature_gate.go:330] unrecognized feature gate: 
NetworkDiagnosticsConfig Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737832 4849 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737836 4849 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737841 4849 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737845 4849 feature_gate.go:330] unrecognized feature gate: Example Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737849 4849 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737852 4849 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737855 4849 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737859 4849 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737862 4849 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737866 4849 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737869 4849 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737872 4849 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737875 4849 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737879 4849 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737884 4849 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737887 4849 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737890 4849 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737894 4849 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737897 4849 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737900 4849 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737903 4849 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737906 4849 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737909 4849 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737913 4849 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737916 4849 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737919 4849 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737922 4849 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737926 4849 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737929 4849 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737932 4849 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737935 4849 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737939 4849 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737942 4849 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737955 4849 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.737959 4849 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.737966 4849 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738083 4849 feature_gate.go:330] unrecognized feature gate: 
AWSEFSDriverVolumeMetrics Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738090 4849 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738096 4849 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738099 4849 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738102 4849 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738106 4849 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738109 4849 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738112 4849 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738115 4849 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738122 4849 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738125 4849 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738128 4849 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738131 4849 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738134 4849 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738138 4849 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738141 4849 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738144 4849 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738147 4849 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738150 4849 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738154 4849 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738158 4849 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738162 4849 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738166 4849 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738170 4849 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738174 4849 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738177 4849 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738180 4849 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738184 4849 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738187 4849 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738190 4849 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738194 4849 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738198 4849 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738201 4849 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738204 4849 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738208 4849 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738212 4849 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738215 4849 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738218 4849 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738221 4849 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738225 4849 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738228 4849 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738232 4849 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738235 4849 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738238 4849 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738241 4849 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738244 4849 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738247 4849 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738251 4849 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738254 4849 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 03 12:20:53 crc 
kubenswrapper[4849]: W1203 12:20:53.738257 4849 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738260 4849 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738263 4849 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738266 4849 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738270 4849 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738274 4849 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738278 4849 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738281 4849 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738284 4849 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738287 4849 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738291 4849 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738294 4849 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738297 4849 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738300 4849 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738304 4849 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738307 4849 feature_gate.go:330] unrecognized feature gate: Example Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738310 4849 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738313 4849 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738316 4849 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738320 4849 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738323 4849 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.738326 4849 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.738332 4849 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false 
ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.738502 4849 server.go:940] "Client rotation is on, will bootstrap in background" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.741672 4849 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.741765 4849 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.742556 4849 server.go:997] "Starting client certificate rotation" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.742580 4849 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.742754 4849 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-13 23:24:46.722667009 +0000 UTC Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.742826 4849 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 251h3m52.979843151s for next certificate rotation Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.754865 4849 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.757195 4849 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.766883 4849 log.go:25] "Validated CRI v1 runtime API" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.784658 4849 log.go:25] "Validated CRI v1 image API" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.785658 4849 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.788854 4849 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-03-12-17-31-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.788876 4849 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:49 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm:{mountpoint:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm major:0 minor:42 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:50 fsType:tmpfs blockSize:0} overlay_0-43:{mountpoint:/var/lib/containers/storage/overlay/94b752e0a51c0134b00ddef6dc7a933a9d7c1d9bdc88a18dae4192a0d557d623/merged major:0 minor:43 fsType:overlay blockSize:0}] Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.800687 4849 manager.go:217] Machine: {Timestamp:2025-12-03 12:20:53.799373081 +0000 UTC m=+0.261220885 
CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2445404 MemoryCapacity:33654116352 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:823dec58-3bc9-4735-a59a-6b887b18964d BootID:a8810eae-d27e-4008-b0f5-39f6de821e7a Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827056128 Type:vfs Inodes:4108168 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm DeviceMajor:0 DeviceMinor:42 Capacity:65536000 Type:vfs Inodes:4108168 HasInodes:true} {Device:overlay_0-43 DeviceMajor:0 DeviceMinor:43 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:49 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:50 Capacity:1073741824 Type:vfs Inodes:4108168 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:a6:40:ae Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:enp3s0 MacAddress:fa:16:3e:a6:40:ae Speed:-1 Mtu:1500} {Name:enp7s0 MacAddress:fa:16:3e:c0:46:74 Speed:-1 Mtu:1440} {Name:enp7s0.20 MacAddress:52:54:00:d5:c5:ec Speed:-1 Mtu:1436} {Name:enp7s0.21 MacAddress:52:54:00:b2:f9:3f Speed:-1 Mtu:1436} {Name:enp7s0.22 MacAddress:52:54:00:ac:29:a5 Speed:-1 Mtu:1436} {Name:eth10 MacAddress:7e:d9:71:bd:fc:a4 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:9e:10:e0:5c:f6:6f Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654116352 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:65536 Type:Data Level:1} {Id:0 Size:65536 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:65536 Type:Data Level:1} {Id:1 Size:65536 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:65536 Type:Data Level:1} {Id:10 Size:65536 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:65536 Type:Data Level:1} {Id:11 Size:65536 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:65536 Type:Data Level:1} {Id:2 Size:65536 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] 
Caches:[{Id:3 Size:65536 Type:Data Level:1} {Id:3 Size:65536 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:65536 Type:Data Level:1} {Id:4 Size:65536 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:65536 Type:Data Level:1} {Id:5 Size:65536 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:65536 Type:Data Level:1} {Id:6 Size:65536 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:65536 Type:Data Level:1} {Id:7 Size:65536 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:65536 Type:Data Level:1} {Id:8 Size:65536 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:65536 Type:Data Level:1} {Id:9 Size:65536 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.800845 4849 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.800926 4849 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.801180 4849 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.801332 4849 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.801352 4849 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.802039 4849 topology_manager.go:138] "Creating topology manager with none policy" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.802056 4849 container_manager_linux.go:303] "Creating device plugin manager" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.802431 4849 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.802447 4849 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.802746 4849 state_mem.go:36] "Initialized new in-memory state store" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.802820 4849 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.804900 4849 kubelet.go:418] "Attempting to sync node with API server" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.804917 4849 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" 
Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.804943 4849 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.804961 4849 kubelet.go:324] "Adding apiserver pod source" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.804971 4849 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.807108 4849 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.807634 4849 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.808867 4849 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.25.198:6443: connect: connection refused Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.808901 4849 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.808873 4849 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.25.198:6443: connect: connection refused Dec 03 12:20:53 crc kubenswrapper[4849]: E1203 12:20:53.809102 4849 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.25.198:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:20:53 crc kubenswrapper[4849]: E1203 12:20:53.809049 4849 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.25.198:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.809944 4849 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.809974 4849 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.809981 4849 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.809987 4849 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.809997 4849 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.810004 4849 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.810010 4849 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.810019 4849 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/downward-api" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.810026 4849 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.810032 4849 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.810080 4849 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.810088 4849 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.810442 4849 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.810764 4849 server.go:1280] "Started kubelet" Dec 03 12:20:53 crc systemd[1]: Started Kubernetes Kubelet. Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.812687 4849 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.25.198:6443: connect: connection refused Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.812934 4849 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.812435 4849 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.813300 4849 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.814160 4849 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.814182 4849 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.814289 4849 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 08:15:48.38546098 +0000 UTC Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.814657 4849 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.814693 4849 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.814782 4849 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 03 12:20:53 crc kubenswrapper[4849]: E1203 12:20:53.814964 4849 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 03 12:20:53 crc kubenswrapper[4849]: E1203 12:20:53.815590 4849 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" interval="200ms" Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.815629 4849 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.25.198:6443: connect: connection refused Dec 03 12:20:53 crc kubenswrapper[4849]: E1203 12:20:53.815687 4849 reflector.go:158] 
"Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.25.198:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.817850 4849 factory.go:55] Registering systemd factory Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.817874 4849 factory.go:221] Registration of the systemd container factory successfully Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.818132 4849 factory.go:153] Registering CRI-O factory Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.818158 4849 factory.go:221] Registration of the crio container factory successfully Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.818208 4849 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.818224 4849 factory.go:103] Registering Raw factory Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.818237 4849 manager.go:1196] Started watching for new ooms in manager Dec 03 12:20:53 crc kubenswrapper[4849]: E1203 12:20:53.817934 4849 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.25.198:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187db3ebada01f1a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 12:20:53.810741018 +0000 UTC m=+0.272588801,LastTimestamp:2025-12-03 12:20:53.810741018 +0000 UTC m=+0.272588801,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.818676 4849 manager.go:319] Starting recovery of all containers Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.819989 4849 server.go:460] "Adding debug handlers to kubelet server" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.827871 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.828273 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.828356 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.828412 4849 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.828463 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.828537 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.828598 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.828678 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.828735 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.828801 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.828857 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.828908 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.828971 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829571 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829606 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829618 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829629 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829658 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829670 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829682 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829692 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829703 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829715 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829726 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829736 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829970 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.829988 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830001 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830014 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830025 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830038 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830048 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830058 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830071 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830081 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830094 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830104 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830114 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830127 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830137 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830146 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830159 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830169 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830182 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830191 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830201 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830213 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830223 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830235 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830245 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830255 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830267 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830283 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.830294 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831331 4849 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831366 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831381 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831396 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 03 12:20:53 crc 
kubenswrapper[4849]: I1203 12:20:53.831407 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831419 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831429 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831440 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831450 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831472 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831486 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831496 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831510 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831521 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831531 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 
12:20:53.831543 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831594 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831609 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831618 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831627 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831654 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831664 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831675 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831752 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831763 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831786 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831796 4849 reconstruct.go:130] "Volume 
is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831806 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831818 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831829 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831840 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831852 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831863 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831875 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831884 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831895 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831904 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831914 4849 reconstruct.go:130] "Volume is marked as uncertain and added into 
the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831925 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831935 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831957 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831968 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831977 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831991 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.831999 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832011 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832020 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832030 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832041 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832050 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832062 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832081 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832100 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832113 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832124 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832136 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832148 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832159 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832171 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832185 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832198 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832208 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832218 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832227 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832239 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832248 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832257 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832267 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832276 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832288 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832297 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832307 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832319 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832329 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832343 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832353 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832362 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832373 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832381 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832393 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832402 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832411 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832423 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832432 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832443 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832453 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832466 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832474 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832485 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832586 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832596 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832607 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832616 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832627 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832636 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832659 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832669 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832677 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832689 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832698 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832706 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832717 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832726 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832737 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832746 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832755 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832766 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832775 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832784 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.832796 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.833761 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.833774 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.833810 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.833820 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.833834 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.833843 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.833855 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.833864 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.833874 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.833886 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834586 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834634 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834666 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834676 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834685 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834694 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834702 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834710 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834719 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834727 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834735 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834743 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834753 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834761 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834770 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834779 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834787 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834795 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834803 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834812 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834843 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.834855 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835042 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835055 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835063 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835071 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835079 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835088 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835096 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835104 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835112 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835120 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835128 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835137 4849 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835145 4849 reconstruct.go:97] "Volume reconstruction finished" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835153 4849 reconciler.go:26] "Reconciler: start to sync state" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.835955 4849 manager.go:324] Recovery completed Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.843260 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.844179 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.844212 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.844221 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.844830 4849 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.844850 4849 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.844868 4849 state_mem.go:36] "Initialized new in-memory state store" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.848717 4849 policy_none.go:49] "None policy: Start" Dec 03 12:20:53 
crc kubenswrapper[4849]: I1203 12:20:53.849229 4849 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.849251 4849 state_mem.go:35] "Initializing new in-memory state store" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.853997 4849 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.855242 4849 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.855352 4849 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.855375 4849 kubelet.go:2335] "Starting kubelet main sync loop" Dec 03 12:20:53 crc kubenswrapper[4849]: E1203 12:20:53.855410 4849 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 03 12:20:53 crc kubenswrapper[4849]: W1203 12:20:53.855709 4849 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.25.198:6443: connect: connection refused Dec 03 12:20:53 crc kubenswrapper[4849]: E1203 12:20:53.855767 4849 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.25.198:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.893305 4849 manager.go:334] "Starting Device Plugin manager" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.893336 4849 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.893347 4849 server.go:79] "Starting device plugin registration server" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.893567 4849 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.893585 4849 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.893699 4849 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.893782 4849 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.893795 4849 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 03 12:20:53 crc kubenswrapper[4849]: E1203 12:20:53.899394 4849 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.956198 4849 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.956307 
4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.957196 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.957229 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.957238 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.957343 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.957591 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.957658 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.957864 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.957887 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.957895 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.957991 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.958056 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.958088 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.958695 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.958724 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.958732 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.958778 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.958801 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.958777 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.958827 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.958811 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.958839 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.959010 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.959117 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.959152 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.959528 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.959541 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.959548 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.959635 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.959706 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.959713 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.959738 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.959740 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.959774 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.960154 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.960172 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.960180 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.960217 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.960248 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.960256 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.960312 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.960333 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.960776 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.960806 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.960813 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.994457 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.995515 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.995598 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.995676 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:53 crc kubenswrapper[4849]: I1203 12:20:53.995728 4849 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 12:20:53 crc kubenswrapper[4849]: E1203 12:20:53.996104 4849 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.198:6443: connect: connection refused" node="crc" Dec 
03 12:20:54 crc kubenswrapper[4849]: E1203 12:20:54.015917 4849 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" interval="400ms" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036164 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036201 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036219 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036237 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036254 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036301 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036316 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036330 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036344 4849 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036370 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036390 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036411 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036454 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036507 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.036524 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.137949 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.137993 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138009 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") 
pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138065 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138119 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138101 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138137 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138193 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138205 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138214 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138223 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138250 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 
12:20:54.138243 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138289 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138309 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138341 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138224 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138356 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138381 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138399 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138413 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138424 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 
12:20:54.138428 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138456 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138462 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138478 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138481 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138442 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138445 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.138588 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.196927 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.197880 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.197916 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.197925 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.197970 4849 kubelet_node_status.go:76] "Attempting to register node" 
node="crc" Dec 03 12:20:54 crc kubenswrapper[4849]: E1203 12:20:54.198371 4849 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.198:6443: connect: connection refused" node="crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.281580 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: W1203 12:20:54.301282 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-425a1df21c34f2b07808f715a3297f13ea801c0a8508f26346e2f0e76e416c1b WatchSource:0}: Error finding container 425a1df21c34f2b07808f715a3297f13ea801c0a8508f26346e2f0e76e416c1b: Status 404 returned error can't find the container with id 425a1df21c34f2b07808f715a3297f13ea801c0a8508f26346e2f0e76e416c1b Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.301326 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: W1203 12:20:54.316093 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-870de278e57571be6f3d74c1453f1af458a2bebe378c79f7735711d573274c54 WatchSource:0}: Error finding container 870de278e57571be6f3d74c1453f1af458a2bebe378c79f7735711d573274c54: Status 404 returned error can't find the container with id 870de278e57571be6f3d74c1453f1af458a2bebe378c79f7735711d573274c54 Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.326063 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: W1203 12:20:54.333782 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-778b8054fa67648d93b8335cc21aa5d467a141e4520208b790860a5ac346a461 WatchSource:0}: Error finding container 778b8054fa67648d93b8335cc21aa5d467a141e4520208b790860a5ac346a461: Status 404 returned error can't find the container with id 778b8054fa67648d93b8335cc21aa5d467a141e4520208b790860a5ac346a461 Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.345015 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.349736 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:20:54 crc kubenswrapper[4849]: W1203 12:20:54.356237 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-6cd192b8540d8c02c920d17ee630fc6202fe5c7e8bf60d65987afd1032a0629d WatchSource:0}: Error finding container 6cd192b8540d8c02c920d17ee630fc6202fe5c7e8bf60d65987afd1032a0629d: Status 404 returned error can't find the container with id 6cd192b8540d8c02c920d17ee630fc6202fe5c7e8bf60d65987afd1032a0629d Dec 03 12:20:54 crc kubenswrapper[4849]: W1203 12:20:54.360235 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-7915e693767477a071b4cff810d53d8998bb05e823bb778a29b2f08601745315 WatchSource:0}: Error finding container 7915e693767477a071b4cff810d53d8998bb05e823bb778a29b2f08601745315: Status 404 returned error can't find the container with id 7915e693767477a071b4cff810d53d8998bb05e823bb778a29b2f08601745315 Dec 03 12:20:54 crc kubenswrapper[4849]: E1203 12:20:54.417285 4849 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" interval="800ms" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.599301 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.600384 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.600445 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.600460 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.600495 4849 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 12:20:54 crc kubenswrapper[4849]: E1203 12:20:54.600989 4849 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.198:6443: connect: connection refused" node="crc" Dec 03 12:20:54 crc kubenswrapper[4849]: W1203 12:20:54.622525 4849 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.25.198:6443: connect: connection refused Dec 03 12:20:54 crc kubenswrapper[4849]: E1203 12:20:54.622775 4849 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.25.198:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:20:54 crc kubenswrapper[4849]: W1203 12:20:54.781026 4849 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get 
"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.25.198:6443: connect: connection refused Dec 03 12:20:54 crc kubenswrapper[4849]: E1203 12:20:54.781136 4849 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.25.198:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.813928 4849 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.25.198:6443: connect: connection refused Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.814998 4849 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 07:15:39.158424786 +0000 UTC Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.815038 4849 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 546h54m44.343388526s for next certificate rotation Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.859942 4849 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="77a9b353f0813e415205c4827d8d22bd8abef028320acc66dda09cea6e86431f" exitCode=0 Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.860007 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"77a9b353f0813e415205c4827d8d22bd8abef028320acc66dda09cea6e86431f"} Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.860133 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"778b8054fa67648d93b8335cc21aa5d467a141e4520208b790860a5ac346a461"} Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.860230 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.861159 4849 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508" exitCode=0 Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.861171 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508"} Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.861199 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"870de278e57571be6f3d74c1453f1af458a2bebe378c79f7735711d573274c54"} Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.861268 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.861454 4849 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.861494 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.861503 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.861766 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.861793 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.861801 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.862246 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242"} Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.862270 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"425a1df21c34f2b07808f715a3297f13ea801c0a8508f26346e2f0e76e416c1b"} Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.864148 4849 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502" exitCode=0 Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.864196 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502"} Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.864216 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7915e693767477a071b4cff810d53d8998bb05e823bb778a29b2f08601745315"} Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.864288 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.864887 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.864920 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.864929 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.865336 4849 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7" exitCode=0 Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.865362 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7"} Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.865377 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6cd192b8540d8c02c920d17ee630fc6202fe5c7e8bf60d65987afd1032a0629d"} Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.865447 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.865957 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.865995 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.866005 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.866097 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.866835 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.866861 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:54 crc kubenswrapper[4849]: I1203 12:20:54.866870 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:55 crc kubenswrapper[4849]: W1203 12:20:55.161891 4849 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.25.198:6443: connect: connection refused Dec 03 12:20:55 crc kubenswrapper[4849]: E1203 12:20:55.161967 4849 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.25.198:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:20:55 crc kubenswrapper[4849]: E1203 12:20:55.218897 4849 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" interval="1.6s" Dec 03 12:20:55 crc kubenswrapper[4849]: W1203 12:20:55.228560 4849 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.25.198:6443: connect: connection refused Dec 03 12:20:55 crc kubenswrapper[4849]: E1203 12:20:55.228676 4849 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 
192.168.25.198:6443: connect: connection refused" logger="UnhandledError" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.401474 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.402526 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.402568 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.402578 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.402599 4849 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 12:20:55 crc kubenswrapper[4849]: E1203 12:20:55.403051 4849 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.198:6443: connect: connection refused" node="crc" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.868586 4849 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f" exitCode=0 Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.868678 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f"} Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.868823 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.869439 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.869467 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.869479 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.871025 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"5be24cc55926545a24bb33260d0f7e64e0a066f9f4790db06bfb06b316b32a0b"} Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.871133 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.871834 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.871855 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.871863 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.873515 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9e967e4cd7a8c3dde7ea58e96f356524c73163c942d7c067e0e37b9c20bbc85b"} Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.873537 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"78b61a64dfc9b6cb90be1936a3d95ef3dff7f347c848ce4403303b5765592e79"} Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.873548 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"aa19bc0deed4ef04b5eedd9f2c52b31915bac287da48ccbb3a723b1eae85b0ca"} Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.873600 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.874096 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.874114 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.874122 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.875480 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143"} Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.875504 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee"} Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.875515 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0"} Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.875561 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.876096 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.876114 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.876122 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.877845 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0"} Dec 03 
12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.877861 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3"} Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.877871 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3"} Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.877878 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4"} Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.877885 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf"} Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.877938 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.878323 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.878351 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.878358 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.914243 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.919556 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:20:55 crc kubenswrapper[4849]: I1203 12:20:55.980182 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.881700 4849 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5" exitCode=0 Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.881795 4849 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.881779 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5"} Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.881826 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.881917 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 
12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.882252 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.882491 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.882512 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.882522 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.882567 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.882593 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.882600 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.882741 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.882758 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:56 crc kubenswrapper[4849]: I1203 12:20:56.882766 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.003590 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.004239 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.004268 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.004276 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.004295 4849 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.886684 4849 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.886724 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.887069 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4"} Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.887131 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d"} Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.887146 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039"} Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.887155 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16"} Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.887165 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2"} Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.887303 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.887425 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.887455 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.887464 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.887939 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.887959 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:57 crc kubenswrapper[4849]: I1203 12:20:57.887967 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.005954 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.006050 4849 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.006086 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.006727 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.006754 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.006771 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.098293 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.098383 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.099056 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:58 crc kubenswrapper[4849]: 
I1203 12:20:58.099078 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.099087 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.211473 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.888154 4849 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.888197 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.888888 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.888917 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:58 crc kubenswrapper[4849]: I1203 12:20:58.888925 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:20:59 crc kubenswrapper[4849]: I1203 12:20:59.845003 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:20:59 crc kubenswrapper[4849]: I1203 12:20:59.891727 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:20:59 crc kubenswrapper[4849]: I1203 12:20:59.892967 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:20:59 crc kubenswrapper[4849]: I1203 12:20:59.893016 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:20:59 crc kubenswrapper[4849]: I1203 12:20:59.893026 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:00 crc kubenswrapper[4849]: I1203 12:21:00.402031 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 03 12:21:00 crc kubenswrapper[4849]: I1203 12:21:00.402204 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:21:00 crc kubenswrapper[4849]: I1203 12:21:00.403199 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:00 crc kubenswrapper[4849]: I1203 12:21:00.403230 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:00 crc kubenswrapper[4849]: I1203 12:21:00.403238 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:01 crc kubenswrapper[4849]: I1203 12:21:01.717283 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:21:01 crc kubenswrapper[4849]: I1203 12:21:01.717467 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:21:01 crc kubenswrapper[4849]: I1203 12:21:01.718430 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:21:01 crc kubenswrapper[4849]: I1203 12:21:01.718475 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:01 crc kubenswrapper[4849]: I1203 12:21:01.718485 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:02 crc kubenswrapper[4849]: I1203 12:21:02.611936 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 03 12:21:02 crc kubenswrapper[4849]: I1203 12:21:02.612116 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:21:02 crc kubenswrapper[4849]: I1203 12:21:02.613205 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:02 crc kubenswrapper[4849]: I1203 12:21:02.613241 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:02 crc kubenswrapper[4849]: I1203 12:21:02.613251 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:03 crc kubenswrapper[4849]: E1203 12:21:03.899519 4849 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 12:21:04 crc kubenswrapper[4849]: I1203 12:21:04.251687 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:21:04 crc kubenswrapper[4849]: I1203 12:21:04.251822 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:21:04 crc kubenswrapper[4849]: I1203 12:21:04.252781 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:04 crc kubenswrapper[4849]: I1203 12:21:04.252828 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:04 crc kubenswrapper[4849]: I1203 12:21:04.252838 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:04 crc kubenswrapper[4849]: I1203 12:21:04.255430 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:21:04 crc kubenswrapper[4849]: I1203 12:21:04.898332 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:21:04 crc kubenswrapper[4849]: I1203 12:21:04.899107 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:04 crc kubenswrapper[4849]: I1203 12:21:04.899132 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:04 crc kubenswrapper[4849]: I1203 12:21:04.899140 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:05 crc kubenswrapper[4849]: I1203 12:21:05.814832 4849 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 03 12:21:06 crc kubenswrapper[4849]: I1203 12:21:06.015474 4849 
patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 12:21:06 crc kubenswrapper[4849]: I1203 12:21:06.015586 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 12:21:06 crc kubenswrapper[4849]: I1203 12:21:06.019362 4849 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 12:21:06 crc kubenswrapper[4849]: I1203 12:21:06.019429 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 12:21:07 crc kubenswrapper[4849]: I1203 12:21:07.252202 4849 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 12:21:07 crc kubenswrapper[4849]: I1203 12:21:07.252278 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.010414 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.010561 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.011074 4849 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.011122 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.011655 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.011683 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.011692 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.014882 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.195475 4849 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.195527 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.906763 4849 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.907102 4849 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.907276 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.907918 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.908016 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:08 crc kubenswrapper[4849]: I1203 12:21:08.908077 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.011659 4849 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.012555 4849 trace.go:236] Trace[624311314]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 12:20:58.139) (total time: 12872ms): Dec 03 12:21:11 crc kubenswrapper[4849]: Trace[624311314]: ---"Objects listed" error: 12872ms (12:21:11.012) Dec 03 12:21:11 crc kubenswrapper[4849]: Trace[624311314]: [12.872934836s] [12.872934836s] END Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.012580 4849 
reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.013296 4849 trace.go:236] Trace[138753642]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 12:20:57.831) (total time: 13181ms): Dec 03 12:21:11 crc kubenswrapper[4849]: Trace[138753642]: ---"Objects listed" error: 13181ms (12:21:11.013) Dec 03 12:21:11 crc kubenswrapper[4849]: Trace[138753642]: [13.181500394s] [13.181500394s] END Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.013313 4849 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.014266 4849 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.014539 4849 trace.go:236] Trace[194773979]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 12:20:57.803) (total time: 13211ms): Dec 03 12:21:11 crc kubenswrapper[4849]: Trace[194773979]: ---"Objects listed" error: 13211ms (12:21:11.014) Dec 03 12:21:11 crc kubenswrapper[4849]: Trace[194773979]: [13.211125689s] [13.211125689s] END Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.014554 4849 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.014749 4849 trace.go:236] Trace[1513530077]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 12:20:57.895) (total time: 13119ms): Dec 03 12:21:11 crc kubenswrapper[4849]: Trace[1513530077]: ---"Objects listed" error: 13119ms (12:21:11.014) Dec 03 12:21:11 crc kubenswrapper[4849]: Trace[1513530077]: [13.119530294s] [13.119530294s] END Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.014775 4849 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.018156 4849 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.018328 4849 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.019239 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.019268 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.019280 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.019296 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.019306 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.028630 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.033320 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.033355 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 
12:21:11.033364 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.033380 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.033388 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.039545 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.041653 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.041681 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 
12:21:11.041689 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.041706 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.041714 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.047231 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.049245 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.049273 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 
12:21:11.049282 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.049296 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.049304 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.055148 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.057133 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.057162 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 
12:21:11.057170 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.057184 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.057191 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.062999 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.063114 4849 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.064056 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 
12:21:11.064079 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.064087 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.064116 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.064124 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.165532 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.165565 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.165574 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.165589 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.165598 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.268022 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.268055 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.268064 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.268085 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.268094 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.370524 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.370721 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.370796 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.370888 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.370992 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.473364 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.473400 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.473415 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.473431 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.473440 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.574991 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.575229 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.575308 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.575376 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.575437 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.678099 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.678135 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.678145 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.678161 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.678170 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.717547 4849 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.717595 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.780433 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.780594 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.780698 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.780773 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.780826 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.813777 4849 apiserver.go:52] "Watching apiserver" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.815211 4849 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.815418 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.815734 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.815809 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.815923 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.815947 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.815928 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.815983 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.815990 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.816146 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.816195 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.817936 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.817987 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.818220 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.818460 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.818542 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.820754 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.820774 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.820776 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.820850 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.850932 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.867256 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.875507 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.882709 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.882743 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.882754 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.882774 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.882785 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.886212 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.893037 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.903114 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.911576 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.914222 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.915147 4849 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.915801 4849 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0" exitCode=255 Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.915846 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0"} Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.918888 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.918985 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919079 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919155 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919233 4849 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919308 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919194 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919372 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919467 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919350 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919496 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919517 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919536 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919556 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919573 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919588 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919603 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919618 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919633 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919666 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919681 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919697 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919715 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919729 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919742 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919758 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919771 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919788 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919801 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919816 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919831 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919847 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919862 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919877 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919893 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919908 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919921 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919935 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919951 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919966 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919987 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919686 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920002 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919998 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920026 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920042 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920056 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920061 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920079 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920071 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920128 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920146 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920152 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920163 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920180 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920195 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920209 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920208 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920222 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920238 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920252 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920263 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920270 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920268 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920301 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919692 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920321 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919694 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919806 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919823 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920339 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920357 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920375 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920391 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920406 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920420 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920435 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920449 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920464 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920479 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") 
pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920493 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920507 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920521 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920535 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920572 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920591 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920605 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920621 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920666 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920689 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod 
\"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920709 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920725 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920739 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920755 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920769 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920784 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920843 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920863 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920881 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920897 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: 
\"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920913 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920927 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920942 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920958 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920973 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920989 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921020 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921036 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921051 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921067 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" 
(UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921081 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921098 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921112 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921129 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921144 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921159 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921172 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921187 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921203 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921218 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921232 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921247 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921264 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921281 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921297 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921311 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921327 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921342 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921358 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921376 
4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921391 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921407 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921422 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921437 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921452 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921468 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921484 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921500 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921515 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 12:21:11 crc kubenswrapper[4849]: 
I1203 12:21:11.921530 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921544 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921559 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921573 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921589 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921606 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921622 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921654 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921670 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921684 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 
12:21:11.921701 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920340 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919843 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919867 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919902 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919985 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.919979 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920315 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920394 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920409 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920420 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920459 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920483 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920491 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920550 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920613 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920628 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920687 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920779 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920878 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920927 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.920967 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921071 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921088 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921219 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921247 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921257 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921326 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921361 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921407 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921422 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921467 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921570 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921902 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921594 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921605 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921658 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921704 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921775 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921906 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922063 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922084 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.921718 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922106 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922136 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922139 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922123 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922140 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922188 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922193 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922230 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922258 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922266 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922276 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922281 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922292 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922294 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922307 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922325 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922334 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922341 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922370 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922388 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922404 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922421 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922429 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922435 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922453 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922464 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922483 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922500 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922515 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922532 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922535 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922548 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922562 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922577 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922589 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922592 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922628 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922672 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922692 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922694 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922708 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922733 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922740 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922753 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922761 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922755 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922777 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922792 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922857 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922918 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.922926 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923001 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923030 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923046 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923047 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923070 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923085 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923130 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923150 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923165 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923179 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923195 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923212 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923252 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923296 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923338 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923346 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923366 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923413 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923432 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923578 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923604 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923658 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923711 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923734 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923751 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923787 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923804 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923835 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923851 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923867 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923883 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.923900 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.924736 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.924762 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.924787 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.924804 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.924821 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.924843 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.924884 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.924904 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.924925 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.924944 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.924961 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.924982 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925000 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925030 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925050 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925069 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925085 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925103 4849 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925120 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925135 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925196 4849 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925207 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925217 4849 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925228 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925245 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925253 4849 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925262 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925271 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925279 4849 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") 
on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925287 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925296 4849 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925304 4849 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925312 4849 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925320 4849 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925329 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925337 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925345 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925353 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925362 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925370 4849 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925378 4849 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925386 4849 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node 
\"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925394 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925402 4849 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925404 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925411 4849 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925446 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925449 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925459 4849 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925482 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925496 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925507 4849 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925518 4849 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925527 4849 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925536 4849 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925546 4849 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925554 4849 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925562 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925573 4849 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925592 4849 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925609 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: 
\"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925617 4849 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925632 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925632 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925653 4849 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925675 4849 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925685 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925696 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925706 4849 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925715 4849 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925723 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925732 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925741 4849 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925743 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925750 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925768 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925777 4849 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925786 4849 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925794 4849 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925803 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925812 4849 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925820 4849 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925828 4849 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925837 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925846 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 
12:21:11.925866 4849 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925885 4849 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925893 4849 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925902 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925946 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925999 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.926015 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.926293 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.926323 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.926447 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.926562 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.926575 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.925910 4849 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.927669 4849 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.927681 4849 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.927695 4849 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.927705 4849 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.927714 4849 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.927723 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.927732 4849 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc 
kubenswrapper[4849]: I1203 12:21:11.927740 4849 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.927748 4849 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.927756 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.927765 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.927774 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.927923 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.927994 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.928057 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.928545 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.928654 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.928863 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.928889 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.928903 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.929201 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.929419 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.929425 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.929489 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.929558 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.929572 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.929725 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.929871 4849 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.929955 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.930073 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.930283 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.930329 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.930336 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.930342 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.930606 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.930722 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.930771 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.930791 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.930799 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.930820 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.930968 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.931131 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.931204 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.931284 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.931306 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.931373 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.931422 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.931444 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:21:12.431418769 +0000 UTC m=+18.893266552 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.931486 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.931525 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.931544 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.931630 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.931675 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.931758 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.931995 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.932042 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.932111 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.932272 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.932328 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.932392 4849 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.932456 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:12.432442535 +0000 UTC m=+18.894290318 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.932494 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.932507 4849 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.932586 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:12.432570306 +0000 UTC m=+18.894418089 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.932600 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.932682 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.932851 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.933167 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.933232 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.933442 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.933546 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.933583 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.933678 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.933892 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.934195 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.934219 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.934502 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.934665 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.934815 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.934948 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.935107 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.935176 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.935259 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.935402 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.935754 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.936953 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.937106 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.937348 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.937708 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.938529 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.938555 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.938605 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.938694 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.939029 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.939052 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.939259 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.939377 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.939663 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.939685 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.939817 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.939861 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.939868 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.940430 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.941633 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.942035 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.944340 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.944557 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.945709 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.945782 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.945850 4849 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.945941 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:12.445927949 +0000 UTC m=+18.907775732 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.945879 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.946074 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.946128 4849 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:11 crc kubenswrapper[4849]: E1203 12:21:11.946205 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:12.446197727 +0000 UTC m=+18.908045510 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.945811 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.947856 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.948283 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.948349 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.948666 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.948855 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.948864 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.949344 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.949485 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.950750 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.951309 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.952078 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.950835 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.956047 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.959983 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.960818 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.965672 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.968444 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.971955 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.974542 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.975257 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.981671 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.985363 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.985511 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.985571 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.985627 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:11 crc kubenswrapper[4849]: I1203 12:21:11.985706 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:11Z","lastTransitionTime":"2025-12-03T12:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.013328 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.013437 4849 scope.go:117] "RemoveContainer" containerID="3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.028741 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.028893 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.028907 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.028928 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.029225 4849 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.029281 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.029335 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.029382 4849 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.029549 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.029601 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: 
\"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.029690 4849 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.029759 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.029822 4849 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.029874 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.029929 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.029981 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030038 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030097 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030143 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030189 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030233 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030277 4849 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030320 4849 reconciler_common.go:293] "Volume detached for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030373 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030423 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030469 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030515 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030562 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030611 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030730 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030785 4849 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030831 4849 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030874 4849 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.030918 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.031031 4849 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.031091 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.031142 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.031194 4849 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.031285 4849 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.031386 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.031442 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.031488 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.031637 4849 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.031703 4849 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.031807 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.031862 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.031961 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032031 4849 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032081 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: 
\"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032133 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032183 4849 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032248 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032311 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032425 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032501 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032551 4849 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032600 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032667 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032725 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032777 4849 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032827 4849 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032872 4849 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: 
\"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.032920 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.033017 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.033149 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.033214 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.033313 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.033363 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.033419 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.033467 4849 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.033513 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.033557 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.033605 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.033693 4849 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.033862 4849 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.033986 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.034055 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.034148 4849 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.034198 4849 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.034269 4849 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.034354 4849 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.034464 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.034528 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.034629 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.034709 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.034764 4849 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.034866 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.034956 4849 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.035028 4849 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.035089 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.035160 4849 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.035265 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.035315 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.035417 4849 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.035530 4849 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.035617 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.035696 4849 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.035758 4849 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.035809 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.035891 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.035973 4849 reconciler_common.go:293] "Volume 
detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.036039 4849 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.036094 4849 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.036140 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.036185 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.036302 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.036412 4849 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.036467 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.036524 4849 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.036600 4849 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.036730 4849 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.036803 4849 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.036848 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.036995 4849 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.037141 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.037234 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.037306 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.037388 4849 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.037508 4849 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.037596 4849 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.089191 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.089436 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.089513 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.089586 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.089703 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:12Z","lastTransitionTime":"2025-12-03T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.090176 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-x5bqz"] Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.091139 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-hszbg"] Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.091556 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.092021 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-x5bqz" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.096256 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.096387 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.096490 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.096591 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.096657 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.096733 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.096761 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.096815 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.105970 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.116217 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.126524 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.130916 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.136706 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.138253 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d80ee321-2880-456a-9f19-c46cb0ab8128-mcd-auth-proxy-config\") pod \"machine-config-daemon-hszbg\" (UID: \"d80ee321-2880-456a-9f19-c46cb0ab8128\") " pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.138297 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnjv2\" (UniqueName: \"kubernetes.io/projected/d80ee321-2880-456a-9f19-c46cb0ab8128-kube-api-access-gnjv2\") pod \"machine-config-daemon-hszbg\" (UID: \"d80ee321-2880-456a-9f19-c46cb0ab8128\") " pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.138317 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brvfs\" (UniqueName: \"kubernetes.io/projected/a6f3b328-3994-4c31-841d-ea1af43d8326-kube-api-access-brvfs\") pod \"node-resolver-x5bqz\" (UID: \"a6f3b328-3994-4c31-841d-ea1af43d8326\") " pod="openshift-dns/node-resolver-x5bqz" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.138331 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d80ee321-2880-456a-9f19-c46cb0ab8128-rootfs\") pod \"machine-config-daemon-hszbg\" (UID: \"d80ee321-2880-456a-9f19-c46cb0ab8128\") " pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.138355 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a6f3b328-3994-4c31-841d-ea1af43d8326-hosts-file\") pod \"node-resolver-x5bqz\" (UID: \"a6f3b328-3994-4c31-841d-ea1af43d8326\") " pod="openshift-dns/node-resolver-x5bqz" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.138387 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d80ee321-2880-456a-9f19-c46cb0ab8128-proxy-tls\") pod \"machine-config-daemon-hszbg\" (UID: \"d80ee321-2880-456a-9f19-c46cb0ab8128\") " pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.140226 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.148508 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: W1203 12:21:12.149665 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-e826a1c13bcba74be1456130695dc7905f6586406b760531e8b5ccd76b32233f WatchSource:0}: Error finding container e826a1c13bcba74be1456130695dc7905f6586406b760531e8b5ccd76b32233f: Status 404 returned error can't find the container with id e826a1c13bcba74be1456130695dc7905f6586406b760531e8b5ccd76b32233f Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.149720 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.157469 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-api
server-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: W1203 12:21:12.158910 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-784947e475b17b7fc43a0b236be41bcd4c73251459f8435024022ff758b7661e WatchSource:0}: Error finding container 784947e475b17b7fc43a0b236be41bcd4c73251459f8435024022ff758b7661e: Status 404 returned error can't find the container with id 784947e475b17b7fc43a0b236be41bcd4c73251459f8435024022ff758b7661e Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.163884 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.172847 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.179113 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.186828 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03
T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.194198 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.194235 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.194244 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.194258 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.194267 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:12Z","lastTransitionTime":"2025-12-03T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.195318 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.205441 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.212999 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.219229 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.226452 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.236171 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.238974 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a6f3b328-3994-4c31-841d-ea1af43d8326-hosts-file\") pod \"node-resolver-x5bqz\" (UID: \"a6f3b328-3994-4c31-841d-ea1af43d8326\") " pod="openshift-dns/node-resolver-x5bqz" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.239015 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d80ee321-2880-456a-9f19-c46cb0ab8128-proxy-tls\") pod \"machine-config-daemon-hszbg\" (UID: \"d80ee321-2880-456a-9f19-c46cb0ab8128\") " pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.239066 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d80ee321-2880-456a-9f19-c46cb0ab8128-mcd-auth-proxy-config\") pod \"machine-config-daemon-hszbg\" (UID: \"d80ee321-2880-456a-9f19-c46cb0ab8128\") " pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.239084 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnjv2\" (UniqueName: \"kubernetes.io/projected/d80ee321-2880-456a-9f19-c46cb0ab8128-kube-api-access-gnjv2\") pod \"machine-config-daemon-hszbg\" (UID: \"d80ee321-2880-456a-9f19-c46cb0ab8128\") " pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.239117 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brvfs\" (UniqueName: \"kubernetes.io/projected/a6f3b328-3994-4c31-841d-ea1af43d8326-kube-api-access-brvfs\") pod \"node-resolver-x5bqz\" (UID: \"a6f3b328-3994-4c31-841d-ea1af43d8326\") " pod="openshift-dns/node-resolver-x5bqz" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.239130 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d80ee321-2880-456a-9f19-c46cb0ab8128-rootfs\") pod \"machine-config-daemon-hszbg\" (UID: \"d80ee321-2880-456a-9f19-c46cb0ab8128\") " pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.239193 4849 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d80ee321-2880-456a-9f19-c46cb0ab8128-rootfs\") pod \"machine-config-daemon-hszbg\" (UID: \"d80ee321-2880-456a-9f19-c46cb0ab8128\") " pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.239345 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a6f3b328-3994-4c31-841d-ea1af43d8326-hosts-file\") pod \"node-resolver-x5bqz\" (UID: \"a6f3b328-3994-4c31-841d-ea1af43d8326\") " pod="openshift-dns/node-resolver-x5bqz" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.239988 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d80ee321-2880-456a-9f19-c46cb0ab8128-mcd-auth-proxy-config\") pod \"machine-config-daemon-hszbg\" (UID: \"d80ee321-2880-456a-9f19-c46cb0ab8128\") " pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.241763 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d80ee321-2880-456a-9f19-c46cb0ab8128-proxy-tls\") pod \"machine-config-daemon-hszbg\" (UID: \"d80ee321-2880-456a-9f19-c46cb0ab8128\") " pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.244501 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.255075 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnjv2\" (UniqueName: \"kubernetes.io/projected/d80ee321-2880-456a-9f19-c46cb0ab8128-kube-api-access-gnjv2\") pod \"machine-config-daemon-hszbg\" (UID: \"d80ee321-2880-456a-9f19-c46cb0ab8128\") " pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.257205 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brvfs\" (UniqueName: \"kubernetes.io/projected/a6f3b328-3994-4c31-841d-ea1af43d8326-kube-api-access-brvfs\") pod \"node-resolver-x5bqz\" (UID: \"a6f3b328-3994-4c31-841d-ea1af43d8326\") " pod="openshift-dns/node-resolver-x5bqz" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.260132 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.296145 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.296176 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.296184 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.296198 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.296207 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:12Z","lastTransitionTime":"2025-12-03T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.398382 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.398417 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.398431 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.398445 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.398455 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:12Z","lastTransitionTime":"2025-12-03T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.405556 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.410237 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-x5bqz" Dec 03 12:21:12 crc kubenswrapper[4849]: W1203 12:21:12.415519 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd80ee321_2880_456a_9f19_c46cb0ab8128.slice/crio-c1ee37c0401ecd97bec293b7448845928444ae5c7e61504bed01683e79460f0d WatchSource:0}: Error finding container c1ee37c0401ecd97bec293b7448845928444ae5c7e61504bed01683e79460f0d: Status 404 returned error can't find the container with id c1ee37c0401ecd97bec293b7448845928444ae5c7e61504bed01683e79460f0d Dec 03 12:21:12 crc kubenswrapper[4849]: W1203 12:21:12.420118 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6f3b328_3994_4c31_841d_ea1af43d8326.slice/crio-1b94c297aa2a709f27d57fa3e6ccd7e40976c2b85b0b3ffe9c0e80a4f595e8c8 WatchSource:0}: Error finding container 1b94c297aa2a709f27d57fa3e6ccd7e40976c2b85b0b3ffe9c0e80a4f595e8c8: Status 404 returned error can't find the container with id 1b94c297aa2a709f27d57fa3e6ccd7e40976c2b85b0b3ffe9c0e80a4f595e8c8 Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.440737 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.440815 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.440834 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.440911 4849 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.440953 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:13.440941614 +0000 UTC m=+19.902789397 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.440992 4849 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.441059 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:13.441043576 +0000 UTC m=+19.902891359 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.441182 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:21:13.441161246 +0000 UTC m=+19.903009030 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.455908 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-2pjsx"] Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.456164 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.456336 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-wrlhp"] Dec 03 12:21:12 crc kubenswrapper[4849]: W1203 12:21:12.460536 4849 reflector.go:561] object-"openshift-multus"/"cni-copy-resources": failed to list *v1.ConfigMap: configmaps "cni-copy-resources" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.460571 4849 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"cni-copy-resources\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"cni-copy-resources\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 12:21:12 crc kubenswrapper[4849]: W1203 12:21:12.460538 4849 reflector.go:561] object-"openshift-multus"/"multus-daemon-config": failed to list *v1.ConfigMap: configmaps "multus-daemon-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.460595 4849 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-daemon-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"multus-daemon-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 12:21:12 crc kubenswrapper[4849]: W1203 12:21:12.460666 4849 reflector.go:561] object-"openshift-multus"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.460680 4849 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.460682 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 12:21:12 crc kubenswrapper[4849]: W1203 12:21:12.460718 4849 reflector.go:561] object-"openshift-multus"/"default-dockercfg-2q5b6": failed to list *v1.Secret: secrets "default-dockercfg-2q5b6" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.460733 4849 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-dockercfg-2q5b6\": Failed to watch *v1.Secret: failed to list *v1.Secret: 
secrets \"default-dockercfg-2q5b6\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.460901 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.463784 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.463897 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.468659 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.480039 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.487731 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.498635 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.500346 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.500390 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.500399 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.500417 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.500426 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:12Z","lastTransitionTime":"2025-12-03T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.508489 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.518153 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.536673 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 
03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541312 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f83a1f67-8a6b-4725-8da9-31a7def7be47-cnibin\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541344 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-socket-dir-parent\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541360 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-daemon-config\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541377 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-run-multus-certs\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541398 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541415 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f83a1f67-8a6b-4725-8da9-31a7def7be47-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541429 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-conf-dir\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541443 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-var-lib-cni-multus\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541460 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-run-k8s-cni-cncf-io\") pod 
\"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541475 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps8nh\" (UniqueName: \"kubernetes.io/projected/1b60c35d-f388-49eb-a5d8-09a6cc752575-kube-api-access-ps8nh\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541487 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tkh9\" (UniqueName: \"kubernetes.io/projected/f83a1f67-8a6b-4725-8da9-31a7def7be47-kube-api-access-7tkh9\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541501 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f83a1f67-8a6b-4725-8da9-31a7def7be47-os-release\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541514 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f83a1f67-8a6b-4725-8da9-31a7def7be47-cni-binary-copy\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.541517 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.541545 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.541558 4849 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541527 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-system-cni-dir\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541603 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-etc-kubernetes\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.541674 4849 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:13.541617073 +0000 UTC m=+20.003464855 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541717 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-var-lib-kubelet\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541748 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f83a1f67-8a6b-4725-8da9-31a7def7be47-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541763 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-hostroot\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541797 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f83a1f67-8a6b-4725-8da9-31a7def7be47-system-cni-dir\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541809 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1b60c35d-f388-49eb-a5d8-09a6cc752575-cni-binary-copy\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541828 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-cni-dir\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541839 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-cnibin\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541852 4849 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-os-release\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541864 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-run-netns\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541878 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-var-lib-cni-bin\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.541895 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.541986 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.542001 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.542022 4849 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:12 crc kubenswrapper[4849]: E1203 12:21:12.542069 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:13.542053533 +0000 UTC m=+20.003901316 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.547291 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.554158 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.563702 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03
T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.573070 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03
T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.582252 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.596792 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.602054 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.602093 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.602103 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.602120 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.602129 4849 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:12Z","lastTransitionTime":"2025-12-03T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.610525 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.624192 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642555 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f83a1f67-8a6b-4725-8da9-31a7def7be47-system-cni-dir\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642586 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1b60c35d-f388-49eb-a5d8-09a6cc752575-cni-binary-copy\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642611 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-cni-dir\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642627 
4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-cnibin\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642655 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-os-release\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642670 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-run-netns\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642683 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-var-lib-cni-bin\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642704 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f83a1f67-8a6b-4725-8da9-31a7def7be47-cnibin\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642719 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-socket-dir-parent\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642714 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f83a1f67-8a6b-4725-8da9-31a7def7be47-system-cni-dir\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642733 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-daemon-config\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642796 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-run-multus-certs\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642817 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/f83a1f67-8a6b-4725-8da9-31a7def7be47-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642834 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-conf-dir\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642865 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-var-lib-cni-multus\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642880 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-run-k8s-cni-cncf-io\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642903 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tkh9\" (UniqueName: \"kubernetes.io/projected/f83a1f67-8a6b-4725-8da9-31a7def7be47-kube-api-access-7tkh9\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642917 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps8nh\" (UniqueName: \"kubernetes.io/projected/1b60c35d-f388-49eb-a5d8-09a6cc752575-kube-api-access-ps8nh\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642935 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f83a1f67-8a6b-4725-8da9-31a7def7be47-os-release\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642945 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-cni-dir\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642951 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f83a1f67-8a6b-4725-8da9-31a7def7be47-cni-binary-copy\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642965 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-system-cni-dir\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642985 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-etc-kubernetes\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642987 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-cnibin\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.642999 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f83a1f67-8a6b-4725-8da9-31a7def7be47-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643029 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-var-lib-kubelet\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643040 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-os-release\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643044 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-hostroot\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643062 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-run-netns\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643082 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-var-lib-cni-bin\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643103 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-hostroot\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643145 4849 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-socket-dir-parent\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643187 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f83a1f67-8a6b-4725-8da9-31a7def7be47-os-release\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643255 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-system-cni-dir\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643278 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-etc-kubernetes\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643302 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-run-multus-certs\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643105 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f83a1f67-8a6b-4725-8da9-31a7def7be47-cnibin\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643460 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-var-lib-kubelet\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643481 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-conf-dir\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643491 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-var-lib-cni-multus\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643507 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/1b60c35d-f388-49eb-a5d8-09a6cc752575-host-run-k8s-cni-cncf-io\") pod \"multus-2pjsx\" (UID: 
\"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643802 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f83a1f67-8a6b-4725-8da9-31a7def7be47-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.643822 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f83a1f67-8a6b-4725-8da9-31a7def7be47-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.653967 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.659903 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.669836 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.673662 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.689104 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.702709 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.703597 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.703624 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.703634 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.703660 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.703669 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:12Z","lastTransitionTime":"2025-12-03T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.704234 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.714411 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.723051 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.743578 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.784736 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.805250 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.805289 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.805298 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.805315 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.805325 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:12Z","lastTransitionTime":"2025-12-03T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.824948 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tkrt4"] Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.826760 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.830749 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c687
7441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.837218 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.856095 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.875693 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.895916 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.907317 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.907354 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.907363 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.907376 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 
12:21:12.907385 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:12Z","lastTransitionTime":"2025-12-03T12:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.915493 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.918769 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.918821 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"c1d5a2d80128419531623884b14885c5973752e98912b952b1d4411099b17121"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.920229 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.921453 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.921661 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.922620 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.922658 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.922670 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"784947e475b17b7fc43a0b236be41bcd4c73251459f8435024022ff758b7661e"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.923897 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-x5bqz" event={"ID":"a6f3b328-3994-4c31-841d-ea1af43d8326","Type":"ContainerStarted","Data":"d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.923920 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-x5bqz" 
event={"ID":"a6f3b328-3994-4c31-841d-ea1af43d8326","Type":"ContainerStarted","Data":"1b94c297aa2a709f27d57fa3e6ccd7e40976c2b85b0b3ffe9c0e80a4f595e8c8"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.925177 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerStarted","Data":"9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.925199 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerStarted","Data":"79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.925212 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerStarted","Data":"c1ee37c0401ecd97bec293b7448845928444ae5c7e61504bed01683e79460f0d"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.925820 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"e826a1c13bcba74be1456130695dc7905f6586406b760531e8b5ccd76b32233f"} Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.935303 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945038 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-cni-netd\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945074 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-kubelet\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945089 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-cni-bin\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945103 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovn-node-metrics-cert\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945138 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945187 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovnkube-script-lib\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945385 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-node-log\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945417 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovnkube-config\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945453 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-slash\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945495 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-systemd-units\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945511 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6m9x\" (UniqueName: \"kubernetes.io/projected/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-kube-api-access-x6m9x\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945539 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-env-overrides\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945595 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-openvswitch\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945633 4849 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-log-socket\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945672 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-run-netns\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945691 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-etc-openvswitch\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945706 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-ovn\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945721 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-var-lib-openvswitch\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945734 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-run-ovn-kubernetes\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.945749 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-systemd\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:12 crc kubenswrapper[4849]: I1203 12:21:12.956099 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.006283 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.010681 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.010711 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.010720 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.010733 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.010741 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:13Z","lastTransitionTime":"2025-12-03T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.045990 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046140 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-slash\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046199 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-systemd-units\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 
12:21:13.046208 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-slash\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046218 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6m9x\" (UniqueName: \"kubernetes.io/projected/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-kube-api-access-x6m9x\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046280 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-env-overrides\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046297 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-openvswitch\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046310 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-log-socket\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046327 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-run-netns\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046340 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-etc-openvswitch\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046353 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-ovn\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046376 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-var-lib-openvswitch\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046389 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-run-ovn-kubernetes\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046402 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-systemd\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046456 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-cni-netd\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046481 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-kubelet\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046493 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-cni-bin\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046506 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovn-node-metrics-cert\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046544 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046566 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-run-ovn-kubernetes\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046571 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovnkube-script-lib\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046602 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" 
(UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-node-log\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046617 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovnkube-config\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046820 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-systemd-units\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046854 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-kubelet\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.046874 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-systemd\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.047139 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovnkube-config\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.047145 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-etc-openvswitch\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.047189 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-cni-netd\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.047210 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-ovn\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.047269 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-log-socket\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.047291 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-openvswitch\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.047312 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-run-netns\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.047330 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-var-lib-openvswitch\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.047378 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-node-log\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.047407 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.047414 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-env-overrides\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.047449 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-cni-bin\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.047870 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovnkube-script-lib\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.050838 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovn-node-metrics-cert\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 
12:21:13.088794 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6m9x\" (UniqueName: \"kubernetes.io/projected/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-kube-api-access-x6m9x\") pod \"ovnkube-node-tkrt4\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.106322 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.113131 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.113197 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.113207 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.113221 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.113229 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:13Z","lastTransitionTime":"2025-12-03T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.137500 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.143383 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\
"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: W1203 12:21:13.181803 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d8dd3fd_f66b_4e40_a41b_e444e5e8b677.slice/crio-1c5165cb511d822539f770794f2151e79eb07aa6dcd53ef91316963465ab80ae WatchSource:0}: Error finding container 1c5165cb511d822539f770794f2151e79eb07aa6dcd53ef91316963465ab80ae: Status 404 returned error can't find the container with id 1c5165cb511d822539f770794f2151e79eb07aa6dcd53ef91316963465ab80ae Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.184452 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.215053 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.215086 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.215095 4849 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.215108 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.215116 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:13Z","lastTransitionTime":"2025-12-03T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.222092 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.269058 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.275237 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.283741 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f83a1f67-8a6b-4725-8da9-31a7def7be47-cni-binary-copy\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.283852 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1b60c35d-f388-49eb-a5d8-09a6cc752575-cni-binary-copy\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.316601 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.316625 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.316632 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.316658 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.316669 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:13Z","lastTransitionTime":"2025-12-03T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.321737 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.362074 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.406118 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.418342 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.418368 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 
12:21:13.418376 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.418388 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.418396 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:13Z","lastTransitionTime":"2025-12-03T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.442546 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.449050 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.449138 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.449158 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.449200 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:21:15.44918084 +0000 UTC m=+21.911028623 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.449225 4849 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.449265 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:15.449253857 +0000 UTC m=+21.911101640 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.449327 4849 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.449357 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:15.449350359 +0000 UTC m=+21.911198142 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.482416 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.519973 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.520003 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.520025 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.520038 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.520047 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:13Z","lastTransitionTime":"2025-12-03T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.523619 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.550427 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.550562 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.550564 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.550730 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.550758 4849 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.550602 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.551048 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.551076 4849 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.551155 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:15.551137769 +0000 UTC m=+22.012985553 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.552971 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:15.552954247 +0000 UTC m=+22.014802020 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.561131 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.601606 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.615791 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.622199 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.622238 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.622247 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.622260 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.622268 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:13Z","lastTransitionTime":"2025-12-03T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.642844 4849 configmap.go:193] Couldn't get configMap openshift-multus/multus-daemon-config: failed to sync configmap cache: timed out waiting for the condition Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.642906 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-daemon-config podName:1b60c35d-f388-49eb-a5d8-09a6cc752575 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:14.142889026 +0000 UTC m=+20.604736809 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "multus-daemon-config" (UniqueName: "kubernetes.io/configmap/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-daemon-config") pod "multus-2pjsx" (UID: "1b60c35d-f388-49eb-a5d8-09a6cc752575") : failed to sync configmap cache: timed out waiting for the condition Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.661080 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-0
3T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.665264 4849 projected.go:288] Couldn't get configMap openshift-multus/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.665305 4849 projected.go:194] Error preparing data for projected volume kube-api-access-7tkh9 for pod openshift-multus/multus-additional-cni-plugins-wrlhp: failed to sync configmap cache: timed out waiting for the condition Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.665350 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f83a1f67-8a6b-4725-8da9-31a7def7be47-kube-api-access-7tkh9 podName:f83a1f67-8a6b-4725-8da9-31a7def7be47 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:14.165336513 +0000 UTC m=+20.627184295 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-7tkh9" (UniqueName: "kubernetes.io/projected/f83a1f67-8a6b-4725-8da9-31a7def7be47-kube-api-access-7tkh9") pod "multus-additional-cni-plugins-wrlhp" (UID: "f83a1f67-8a6b-4725-8da9-31a7def7be47") : failed to sync configmap cache: timed out waiting for the condition Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.678027 4849 projected.go:288] Couldn't get configMap openshift-multus/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.678061 4849 projected.go:194] Error preparing data for projected volume kube-api-access-ps8nh for pod openshift-multus/multus-2pjsx: failed to sync configmap cache: timed out waiting for the condition Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.678125 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/1b60c35d-f388-49eb-a5d8-09a6cc752575-kube-api-access-ps8nh podName:1b60c35d-f388-49eb-a5d8-09a6cc752575 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:14.178091533 +0000 UTC m=+20.639939316 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-ps8nh" (UniqueName: "kubernetes.io/projected/1b60c35d-f388-49eb-a5d8-09a6cc752575-kube-api-access-ps8nh") pod "multus-2pjsx" (UID: "1b60c35d-f388-49eb-a5d8-09a6cc752575") : failed to sync configmap cache: timed out waiting for the condition Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.706040 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc
2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.724482 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.724514 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.724522 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.724534 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.724543 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:13Z","lastTransitionTime":"2025-12-03T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.743789 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.782080 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.821138 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.826555 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.826700 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.826780 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.826845 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.826907 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:13Z","lastTransitionTime":"2025-12-03T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.856117 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.856147 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.856147 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.856217 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.856321 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:13 crc kubenswrapper[4849]: E1203 12:21:13.856392 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.859466 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.860061 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.860673 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.861235 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.861775 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.862236 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.862768 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.862938 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.863273 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.863847 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.864298 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.864775 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.865356 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.865825 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.866276 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.866743 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.867207 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.867704 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.868079 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.871031 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.871526 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.872351 4849 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.872850 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.873234 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.874141 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.874818 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.875596 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.876700 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.877116 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.877966 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.878402 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.878812 4849 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.878900 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.880712 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.881169 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.881927 4849 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.883211 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.883801 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.884577 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.885142 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.886076 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.886503 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.887358 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.887930 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.888765 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.889178 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.889965 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.890446 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.891373 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.891838 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.892582 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.893072 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.893509 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.894318 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.894761 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.907575 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.913865 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-2g6w4"] Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.914150 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-2g6w4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.915375 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.928916 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.928937 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.928945 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.928957 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.928966 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:13Z","lastTransitionTime":"2025-12-03T12:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.930160 4849 generic.go:334] "Generic (PLEG): container finished" podID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerID="1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027" exitCode=0 Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.930186 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027"} Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.930214 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerStarted","Data":"1c5165cb511d822539f770794f2151e79eb07aa6dcd53ef91316963465ab80ae"} Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.954784 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0d091257-d0f1-4248-b29a-7ce399629cb0-serviceca\") pod \"node-ca-2g6w4\" (UID: \"0d091257-d0f1-4248-b29a-7ce399629cb0\") " pod="openshift-image-registry/node-ca-2g6w4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.954983 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0d091257-d0f1-4248-b29a-7ce399629cb0-host\") pod \"node-ca-2g6w4\" (UID: \"0d091257-d0f1-4248-b29a-7ce399629cb0\") " pod="openshift-image-registry/node-ca-2g6w4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.955075 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5lx9\" (UniqueName: \"kubernetes.io/projected/0d091257-d0f1-4248-b29a-7ce399629cb0-kube-api-access-m5lx9\") pod \"node-ca-2g6w4\" (UID: \"0d091257-d0f1-4248-b29a-7ce399629cb0\") " pod="openshift-image-registry/node-ca-2g6w4" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.955777 4849 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.975588 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 12:21:13 crc kubenswrapper[4849]: I1203 12:21:13.994953 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.015499 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.030863 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.030895 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.030906 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.030918 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.030926 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:14Z","lastTransitionTime":"2025-12-03T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.047962 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"
mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.055028 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.055507 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0d091257-d0f1-4248-b29a-7ce399629cb0-serviceca\") pod \"node-ca-2g6w4\" (UID: \"0d091257-d0f1-4248-b29a-7ce399629cb0\") " pod="openshift-image-registry/node-ca-2g6w4" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.055579 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0d091257-d0f1-4248-b29a-7ce399629cb0-host\") pod \"node-ca-2g6w4\" (UID: \"0d091257-d0f1-4248-b29a-7ce399629cb0\") " pod="openshift-image-registry/node-ca-2g6w4" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.055624 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5lx9\" (UniqueName: \"kubernetes.io/projected/0d091257-d0f1-4248-b29a-7ce399629cb0-kube-api-access-m5lx9\") pod \"node-ca-2g6w4\" (UID: \"0d091257-d0f1-4248-b29a-7ce399629cb0\") " pod="openshift-image-registry/node-ca-2g6w4" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.055830 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0d091257-d0f1-4248-b29a-7ce399629cb0-host\") pod \"node-ca-2g6w4\" (UID: \"0d091257-d0f1-4248-b29a-7ce399629cb0\") " pod="openshift-image-registry/node-ca-2g6w4" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.056703 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0d091257-d0f1-4248-b29a-7ce399629cb0-serviceca\") pod \"node-ca-2g6w4\" (UID: \"0d091257-d0f1-4248-b29a-7ce399629cb0\") " pod="openshift-image-registry/node-ca-2g6w4" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.108305 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5lx9\" (UniqueName: \"kubernetes.io/projected/0d091257-d0f1-4248-b29a-7ce399629cb0-kube-api-access-m5lx9\") pod \"node-ca-2g6w4\" (UID: \"0d091257-d0f1-4248-b29a-7ce399629cb0\") " 
pod="openshift-image-registry/node-ca-2g6w4" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.121846 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.132610 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.132632 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.132640 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.132671 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.132679 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:14Z","lastTransitionTime":"2025-12-03T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.157041 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-daemon-config\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.157583 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/1b60c35d-f388-49eb-a5d8-09a6cc752575-multus-daemon-config\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.162461 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/sec
rets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.201870 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.224436 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-2g6w4" Dec 03 12:21:14 crc kubenswrapper[4849]: W1203 12:21:14.233099 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d091257_d0f1_4248_b29a_7ce399629cb0.slice/crio-747eabc49999e8d4010fcd8f1e5dfc5131bc434283751aae895dbe7cbf1a4bdb WatchSource:0}: Error finding container 747eabc49999e8d4010fcd8f1e5dfc5131bc434283751aae895dbe7cbf1a4bdb: Status 404 returned error can't find the container with id 747eabc49999e8d4010fcd8f1e5dfc5131bc434283751aae895dbe7cbf1a4bdb Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.233921 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.233948 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.233956 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.233967 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.233975 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:14Z","lastTransitionTime":"2025-12-03T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.239697 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.257938 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tkh9\" (UniqueName: \"kubernetes.io/projected/f83a1f67-8a6b-4725-8da9-31a7def7be47-kube-api-access-7tkh9\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.257982 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps8nh\" (UniqueName: \"kubernetes.io/projected/1b60c35d-f388-49eb-a5d8-09a6cc752575-kube-api-access-ps8nh\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.258081 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.260603 4849 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tkh9\" (UniqueName: \"kubernetes.io/projected/f83a1f67-8a6b-4725-8da9-31a7def7be47-kube-api-access-7tkh9\") pod \"multus-additional-cni-plugins-wrlhp\" (UID: \"f83a1f67-8a6b-4725-8da9-31a7def7be47\") " pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.260973 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps8nh\" (UniqueName: \"kubernetes.io/projected/1b60c35d-f388-49eb-a5d8-09a6cc752575-kube-api-access-ps8nh\") pod \"multus-2pjsx\" (UID: \"1b60c35d-f388-49eb-a5d8-09a6cc752575\") " pod="openshift-multus/multus-2pjsx" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.262732 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.274689 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-2pjsx" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.281409 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.281542 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" Dec 03 12:21:14 crc kubenswrapper[4849]: W1203 12:21:14.285952 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b60c35d_f388_49eb_a5d8_09a6cc752575.slice/crio-e5fc753ae9135b99116606114925f67e98d07aa1483b48fd241b3d82cbdb917e WatchSource:0}: Error finding container e5fc753ae9135b99116606114925f67e98d07aa1483b48fd241b3d82cbdb917e: Status 404 returned error can't find the container with id e5fc753ae9135b99116606114925f67e98d07aa1483b48fd241b3d82cbdb917e Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.303111 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.338635 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.338684 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.338693 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.338707 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.338716 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:14Z","lastTransitionTime":"2025-12-03T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.345985 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.387434 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.421834 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.440892 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.440925 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.440934 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.440945 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.440954 4849 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:14Z","lastTransitionTime":"2025-12-03T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.462115 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.502292 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.542165 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.542887 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.542915 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.542925 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.542937 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.542947 4849 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:14Z","lastTransitionTime":"2025-12-03T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.582765 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.621863 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.645197 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.645236 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.645247 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.645260 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.645268 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:14Z","lastTransitionTime":"2025-12-03T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.661438 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.702323 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.742335 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.747724 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.747757 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.747765 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.747777 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.747787 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:14Z","lastTransitionTime":"2025-12-03T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.784381 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab652
7cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.819728 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.849973 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.850019 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.850028 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.850044 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.850053 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:14Z","lastTransitionTime":"2025-12-03T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.861908 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.901672 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.934454 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-2g6w4" event={"ID":"0d091257-d0f1-4248-b29a-7ce399629cb0","Type":"ContainerStarted","Data":"75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.934501 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-2g6w4" 
event={"ID":"0d091257-d0f1-4248-b29a-7ce399629cb0","Type":"ContainerStarted","Data":"747eabc49999e8d4010fcd8f1e5dfc5131bc434283751aae895dbe7cbf1a4bdb"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.935756 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pjsx" event={"ID":"1b60c35d-f388-49eb-a5d8-09a6cc752575","Type":"ContainerStarted","Data":"336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.935814 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pjsx" event={"ID":"1b60c35d-f388-49eb-a5d8-09a6cc752575","Type":"ContainerStarted","Data":"e5fc753ae9135b99116606114925f67e98d07aa1483b48fd241b3d82cbdb917e"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.937317 4849 generic.go:334] "Generic (PLEG): container finished" podID="f83a1f67-8a6b-4725-8da9-31a7def7be47" containerID="fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8" exitCode=0 Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.937371 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" event={"ID":"f83a1f67-8a6b-4725-8da9-31a7def7be47","Type":"ContainerDied","Data":"fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.937388 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" event={"ID":"f83a1f67-8a6b-4725-8da9-31a7def7be47","Type":"ContainerStarted","Data":"a5eff5e5fca7162f3d45af3f47c733590a18b4321b9f36443eb85afe5e7a1c2d"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.940954 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerStarted","Data":"fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.940990 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerStarted","Data":"5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.941000 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerStarted","Data":"ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.941008 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerStarted","Data":"1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.941030 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerStarted","Data":"274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.941038 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" 
event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerStarted","Data":"59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.942291 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.944032 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.952370 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.952398 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.952406 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.952419 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.952428 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:14Z","lastTransitionTime":"2025-12-03T12:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:14 crc kubenswrapper[4849]: I1203 12:21:14.980428 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.021227 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.054129 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.054160 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.054170 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.054181 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.054190 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:15Z","lastTransitionTime":"2025-12-03T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.061445 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.100702 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.145635 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c8658902
8e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.159467 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.159499 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.159507 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.159520 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.159528 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:15Z","lastTransitionTime":"2025-12-03T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.181681 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.222076 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.261985 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.262025 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.262035 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.262048 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.262056 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:15Z","lastTransitionTime":"2025-12-03T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.262183 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.302940 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.341909 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.363950 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.363976 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.363984 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.363995 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.364043 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:15Z","lastTransitionTime":"2025-12-03T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.381414 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.422484 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.461746 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.466057 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.466094 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.466102 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.466115 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.466125 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:15Z","lastTransitionTime":"2025-12-03T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.468370 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.468474 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.468500 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.468543 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:21:19.468529543 +0000 UTC m=+25.930377326 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.468547 4849 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.468598 4849 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.468608 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:19.468592041 +0000 UTC m=+25.930439825 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.468632 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:19.468622849 +0000 UTC m=+25.930470632 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.499803 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.545586 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.568348 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.568378 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.568386 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.568399 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.568407 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:15Z","lastTransitionTime":"2025-12-03T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.568830 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.568862 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.568947 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.568982 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.568994 4849 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.569050 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:19.569035412 +0000 UTC m=+26.030883196 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.568951 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.569081 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.569091 4849 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.569126 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:19.569115042 +0000 UTC m=+26.030962815 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.579977 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.622873 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.659712 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.670863 4849 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.670898 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.670911 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.670927 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.670935 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:15Z","lastTransitionTime":"2025-12-03T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.710588 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.749159 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c8658902
8e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.772305 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.772333 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.772342 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.772354 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.772363 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:15Z","lastTransitionTime":"2025-12-03T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.781750 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.856454 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.856480 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.856466 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.856580 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.856699 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:15 crc kubenswrapper[4849]: E1203 12:21:15.856779 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.875413 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.875448 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.875457 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.875469 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.875479 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:15Z","lastTransitionTime":"2025-12-03T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.947261 4849 generic.go:334] "Generic (PLEG): container finished" podID="f83a1f67-8a6b-4725-8da9-31a7def7be47" containerID="3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59" exitCode=0 Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.947350 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" event={"ID":"f83a1f67-8a6b-4725-8da9-31a7def7be47","Type":"ContainerDied","Data":"3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59"} Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.960712 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.972074 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.977323 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.977351 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.977361 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.977373 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.977382 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:15Z","lastTransitionTime":"2025-12-03T12:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.983162 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:15 crc kubenswrapper[4849]: I1203 12:21:15.993116 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.000582 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:15Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.025192 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.059934 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.079249 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.079278 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.079287 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.079298 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.079307 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:16Z","lastTransitionTime":"2025-12-03T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.105792 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.142336 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.180878 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.180911 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.180920 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.180933 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.180941 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:16Z","lastTransitionTime":"2025-12-03T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.181372 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.221172 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.260181 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.283140 4849 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.283165 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.283173 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.283186 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.283194 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:16Z","lastTransitionTime":"2025-12-03T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.303687 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.350351 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c8658902
8e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.383164 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.385284 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.385313 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.385322 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.385333 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.385341 4849 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:16Z","lastTransitionTime":"2025-12-03T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.487325 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.487352 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.487362 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.487373 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.487381 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:16Z","lastTransitionTime":"2025-12-03T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.589308 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.589344 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.589354 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.589367 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.589375 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:16Z","lastTransitionTime":"2025-12-03T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.691030 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.691069 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.691079 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.691092 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.691101 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:16Z","lastTransitionTime":"2025-12-03T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.792809 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.792836 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.792844 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.792857 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.792865 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:16Z","lastTransitionTime":"2025-12-03T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.894071 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.894100 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.894109 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.894120 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.894128 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:16Z","lastTransitionTime":"2025-12-03T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.953297 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerStarted","Data":"1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86"} Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.954924 4849 generic.go:334] "Generic (PLEG): container finished" podID="f83a1f67-8a6b-4725-8da9-31a7def7be47" containerID="95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b" exitCode=0 Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.954958 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" event={"ID":"f83a1f67-8a6b-4725-8da9-31a7def7be47","Type":"ContainerDied","Data":"95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b"} Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.963759 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.972005 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.988843 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.995430 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.995458 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.995467 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.995479 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.995488 4849 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:16Z","lastTransitionTime":"2025-12-03T12:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:16 crc kubenswrapper[4849]: I1203 12:21:16.997946 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluste
r-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:16Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.006569 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.016140 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.024594 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.033782 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.043922 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.051957 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.060050 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.068813 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.076666 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.088342 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.095781 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.096896 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.096986 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.097059 4849 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.097115 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.097178 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:17Z","lastTransitionTime":"2025-12-03T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.199469 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.199588 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.199598 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.199612 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.199620 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:17Z","lastTransitionTime":"2025-12-03T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.301789 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.301830 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.301838 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.301849 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.301857 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:17Z","lastTransitionTime":"2025-12-03T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.404023 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.404053 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.404061 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.404073 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.404081 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:17Z","lastTransitionTime":"2025-12-03T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.506155 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.506180 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.506189 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.506201 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.506208 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:17Z","lastTransitionTime":"2025-12-03T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.607885 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.607912 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.607919 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.607930 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.607937 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:17Z","lastTransitionTime":"2025-12-03T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.710248 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.710273 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.710281 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.710293 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.710300 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:17Z","lastTransitionTime":"2025-12-03T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.812780 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.812835 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.812845 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.812855 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.812864 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:17Z","lastTransitionTime":"2025-12-03T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.856465 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.856486 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:17 crc kubenswrapper[4849]: E1203 12:21:17.856573 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.856590 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:17 crc kubenswrapper[4849]: E1203 12:21:17.856694 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:17 crc kubenswrapper[4849]: E1203 12:21:17.856747 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.915281 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.915304 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.915312 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.915323 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.915332 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:17Z","lastTransitionTime":"2025-12-03T12:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.960924 4849 generic.go:334] "Generic (PLEG): container finished" podID="f83a1f67-8a6b-4725-8da9-31a7def7be47" containerID="3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252" exitCode=0 Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.960963 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" event={"ID":"f83a1f67-8a6b-4725-8da9-31a7def7be47","Type":"ContainerDied","Data":"3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252"} Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.969123 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.984681 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:17 crc kubenswrapper[4849]: I1203 12:21:17.993842 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:17Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.003765 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.011925 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.017478 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.017497 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.017509 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.017520 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.017527 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:18Z","lastTransitionTime":"2025-12-03T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.020099 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.027730 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.035129 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.043198 4849 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1
d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.055476 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1
e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.064072 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.074440 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.082625 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.091281 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.100440 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.119659 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.119688 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.119697 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.119712 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.119729 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:18Z","lastTransitionTime":"2025-12-03T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.221508 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.221550 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.221560 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.221577 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.221587 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:18Z","lastTransitionTime":"2025-12-03T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.323346 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.323374 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.323383 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.323394 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.323403 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:18Z","lastTransitionTime":"2025-12-03T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.425531 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.425678 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.425686 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.425700 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.425709 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:18Z","lastTransitionTime":"2025-12-03T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.528092 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.528123 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.528131 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.528143 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.528151 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:18Z","lastTransitionTime":"2025-12-03T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.630350 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.630385 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.630393 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.630404 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.630413 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:18Z","lastTransitionTime":"2025-12-03T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.732660 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.732701 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.732710 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.732723 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.732732 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:18Z","lastTransitionTime":"2025-12-03T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.834939 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.834974 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.834982 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.834998 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.835006 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:18Z","lastTransitionTime":"2025-12-03T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.936281 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.936312 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.936321 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.936343 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.936351 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:18Z","lastTransitionTime":"2025-12-03T12:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.966458 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerStarted","Data":"b7e04337c4a7d95366dd9607dcc8a0444f93da2a7b1488a2b5251dee21751cfe"} Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.966702 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.966736 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.966748 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.968980 4849 generic.go:334] "Generic (PLEG): container finished" podID="f83a1f67-8a6b-4725-8da9-31a7def7be47" containerID="1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171" exitCode=0 Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.969043 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" event={"ID":"f83a1f67-8a6b-4725-8da9-31a7def7be47","Type":"ContainerDied","Data":"1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171"} Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.976445 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.983149 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.983237 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.987381 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:18 crc kubenswrapper[4849]: I1203 12:21:18.995502 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:18Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.004676 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.011904 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\"
 for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.025241 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath
\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7e04337c4a7d95366dd9607dcc8a0444f93da2a7b1488a2b5251dee21751cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"nam
e\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 
12:21:19.032520 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.038238 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.038261 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.038269 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.038279 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.038286 4849 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:19Z","lastTransitionTime":"2025-12-03T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.041064 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.049103 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.056613 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.064337 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.071850 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.080010 4849 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1
d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.093544 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1
e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.102996 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.111004 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.119301 4849 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.131997 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c8658902
8e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.140543 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.140705 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.140732 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.140741 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.140757 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.140765 4849 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:19Z","lastTransitionTime":"2025-12-03T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.148527 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.156112 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.164064 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.173597 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.183099 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.191239 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.199332 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.207120 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.220525 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.238821 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7e04337c4a7d95366dd9607dcc8a0444f93da2a7b1488a2b5251dee21751cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.242371 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.242398 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.242405 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.242418 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.242426 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:19Z","lastTransitionTime":"2025-12-03T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.258833 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.344680 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.344707 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.344716 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.344728 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.344736 4849 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:19Z","lastTransitionTime":"2025-12-03T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.446393 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.446420 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.446428 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.446439 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.446447 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:19Z","lastTransitionTime":"2025-12-03T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.502944 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.503008 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.503049 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:21:27.50302875 +0000 UTC m=+33.964876543 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.503068 4849 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.503092 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.503102 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:27.503091199 +0000 UTC m=+33.964938981 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.503201 4849 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.503241 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:27.503232003 +0000 UTC m=+33.965079787 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.548215 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.548235 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.548242 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.548252 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.548259 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:19Z","lastTransitionTime":"2025-12-03T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.604120 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.604153 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.604245 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.604259 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.604268 4849 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.604267 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.604288 4849 
projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.604295 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:27.604286936 +0000 UTC m=+34.066134719 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.604300 4849 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.604331 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:27.60432128 +0000 UTC m=+34.066169073 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.649532 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.649558 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.649566 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.649586 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.649593 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:19Z","lastTransitionTime":"2025-12-03T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.751302 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.751324 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.751331 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.751342 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.751351 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:19Z","lastTransitionTime":"2025-12-03T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.853116 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.853142 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.853150 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.853180 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.853188 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:19Z","lastTransitionTime":"2025-12-03T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.856346 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.856421 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.856683 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.856731 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.856638 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:19 crc kubenswrapper[4849]: E1203 12:21:19.856787 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.955502 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.955546 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.955555 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.955567 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.955596 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:19Z","lastTransitionTime":"2025-12-03T12:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.973305 4849 generic.go:334] "Generic (PLEG): container finished" podID="f83a1f67-8a6b-4725-8da9-31a7def7be47" containerID="3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e" exitCode=0 Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.973718 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" event={"ID":"f83a1f67-8a6b-4725-8da9-31a7def7be47","Type":"ContainerDied","Data":"3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e"} Dec 03 12:21:19 crc kubenswrapper[4849]: I1203 12:21:19.983741 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a57
8bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.000215 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c8658902
8e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:19Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.010679 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.021554 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.030971 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.039612 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.051234 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.056932 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.056967 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.056975 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.056986 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.056994 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:20Z","lastTransitionTime":"2025-12-03T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.061235 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.070666 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.080366 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.087805 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.101554 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7e04337c4a7d95366dd9607dcc8a0444f93da2a7b1488a2b5251dee21751cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.110776 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.121243 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.130555 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.159044 4849 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.159076 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.159084 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.159098 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.159109 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:20Z","lastTransitionTime":"2025-12-03T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.261471 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.261508 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.261516 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.261528 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.261539 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:20Z","lastTransitionTime":"2025-12-03T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.363381 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.363415 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.363423 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.363435 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.363444 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:20Z","lastTransitionTime":"2025-12-03T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.465839 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.465868 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.465876 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.465887 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.465895 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:20Z","lastTransitionTime":"2025-12-03T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.567556 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.567592 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.567601 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.567616 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.567624 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:20Z","lastTransitionTime":"2025-12-03T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.669287 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.669316 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.669324 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.669336 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.669344 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:20Z","lastTransitionTime":"2025-12-03T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.770692 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.770712 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.770723 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.770732 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.770739 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:20Z","lastTransitionTime":"2025-12-03T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.872101 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.872131 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.872139 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.872150 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.872159 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:20Z","lastTransitionTime":"2025-12-03T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.973806 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.973838 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.973847 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.973857 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.973865 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:20Z","lastTransitionTime":"2025-12-03T12:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.977872 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" event={"ID":"f83a1f67-8a6b-4725-8da9-31a7def7be47","Type":"ContainerStarted","Data":"7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d"} Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.979091 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/0.log" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.981429 4849 generic.go:334] "Generic (PLEG): container finished" podID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerID="b7e04337c4a7d95366dd9607dcc8a0444f93da2a7b1488a2b5251dee21751cfe" exitCode=1 Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.981457 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"b7e04337c4a7d95366dd9607dcc8a0444f93da2a7b1488a2b5251dee21751cfe"} Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.981884 4849 scope.go:117] "RemoveContainer" containerID="b7e04337c4a7d95366dd9607dcc8a0444f93da2a7b1488a2b5251dee21751cfe" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.988891 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:20 crc kubenswrapper[4849]: I1203 12:21:20.997358 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:20Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.004754 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.011299 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.024327 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7e04337c4a7d95366dd9607dcc8a0444f93da2a7b1488a2b5251dee21751cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.032619 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.040128 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.047091 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.055364 4849 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1
d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.067851 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1
e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.075318 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.075343 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc 
kubenswrapper[4849]: I1203 12:21:21.075351 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.075363 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.075371 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.076826 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.085151 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.095363 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.104377 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.113400 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda8
6e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.120716 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{}
,\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.132529 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c8658902
8e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.141042 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.149461 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.157051 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.165226 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.174901 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda8
6e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.177219 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.177264 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.177273 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.177286 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.177295 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.186448 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.194868 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.204547 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.219986 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.224121 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.224156 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.224174 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.224188 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.224198 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.239432 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7e04337c4a7d95366dd9607dcc8a0444f93da2a7b1488a2b5251dee21751cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7e04337c4a7d95366dd9607dcc8a0444f93da2a7b1488a2b5251dee21751cfe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"message\\\":\\\"P (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 12:21:20.335216 6108 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 12:21:20.335347 6108 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 12:21:20.335354 6108 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 12:21:20.335410 6108 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 12:21:20.335417 6108 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 12:21:20.335452 6108 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 12:21:20.335624 6108 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 12:21:20.335708 6108 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 12:21:20.335738 6108 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 12:21:20.335937 6108 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: E1203 12:21:21.239759 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.242274 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.242304 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.242313 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.242327 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.242335 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.246379 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: E1203 12:21:21.250336 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.252481 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.252507 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.252515 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.252526 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.252534 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.255487 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: E1203 12:21:21.260042 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.262432 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.262468 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.262479 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.262493 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.262500 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.264249 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: E1203 12:21:21.270014 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.272693 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.272728 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.272738 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.272751 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.272766 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: E1203 12:21:21.281798 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:21 crc kubenswrapper[4849]: E1203 12:21:21.281903 4849 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.282804 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.282839 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.282848 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.282860 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.282870 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.384863 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.384897 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.384906 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.384920 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.384932 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.486506 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.486538 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.486546 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.486557 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.486565 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.588319 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.588348 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.588356 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.588368 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.588378 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.690416 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.690446 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.690455 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.690467 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.690476 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.792504 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.792544 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.792552 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.792563 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.792571 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.855530 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.855546 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.855533 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:21 crc kubenswrapper[4849]: E1203 12:21:21.855625 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:21 crc kubenswrapper[4849]: E1203 12:21:21.855753 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:21 crc kubenswrapper[4849]: E1203 12:21:21.855814 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.893774 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.893812 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.893820 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.893833 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.893841 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.984366 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/1.log" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.984771 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/0.log" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.986547 4849 generic.go:334] "Generic (PLEG): container finished" podID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerID="69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28" exitCode=1 Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.986575 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.986611 4849 scope.go:117] "RemoveContainer" containerID="b7e04337c4a7d95366dd9607dcc8a0444f93da2a7b1488a2b5251dee21751cfe" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.986997 4849 scope.go:117] "RemoveContainer" containerID="69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28" Dec 03 12:21:21 crc kubenswrapper[4849]: E1203 12:21:21.987129 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.995011 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.995048 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.995058 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.995069 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.995077 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:21Z","lastTransitionTime":"2025-12-03T12:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:21 crc kubenswrapper[4849]: I1203 12:21:21.997580 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.010698 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c8658902
8e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.018870 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.028427 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.036017 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.043762 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.053202 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda8
6e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.059236 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.070377 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics
-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7e04337c4a7d95366dd9607dcc8a0444f93da2a7b1488a2b5251dee21751cfe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"message\\\":\\\"P (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1203 12:21:20.335216 6108 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 12:21:20.335347 6108 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 12:21:20.335354 6108 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 12:21:20.335410 6108 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 12:21:20.335417 6108 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 12:21:20.335452 6108 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 12:21:20.335624 6108 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 12:21:20.335708 6108 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 12:21:20.335738 6108 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1203 12:21:20.335937 6108 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"uuid == {78f6184b-c7cf-436d-8cbb-4b31f8af75e8}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:21:21.557494 6260 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nF1203 12:21:21.557904 6260 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:21.557869 6260 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service 
k8s.ovn.org/owner:openshift-operator-lifecycle-manager/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.076602 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\
\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.085061 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.092555 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.097223 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.097252 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.097260 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.097272 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.097280 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:22Z","lastTransitionTime":"2025-12-03T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.100380 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.110219 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.116862 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:22Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.199460 4849 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.199485 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.199493 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.199503 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.199512 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:22Z","lastTransitionTime":"2025-12-03T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.301396 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.301475 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.301484 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.301494 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.301501 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:22Z","lastTransitionTime":"2025-12-03T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.403201 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.403235 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.403243 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.403255 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.403263 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:22Z","lastTransitionTime":"2025-12-03T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.505503 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.505540 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.505548 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.505561 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.505569 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:22Z","lastTransitionTime":"2025-12-03T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.607556 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.607586 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.607594 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.607606 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.607614 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:22Z","lastTransitionTime":"2025-12-03T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.709611 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.709788 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.709847 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.709901 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.709968 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:22Z","lastTransitionTime":"2025-12-03T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.811772 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.811802 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.811810 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.811821 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.811829 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:22Z","lastTransitionTime":"2025-12-03T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.913898 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.913935 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.913945 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.913958 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.913966 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:22Z","lastTransitionTime":"2025-12-03T12:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.990244 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/1.log" Dec 03 12:21:22 crc kubenswrapper[4849]: I1203 12:21:22.992340 4849 scope.go:117] "RemoveContainer" containerID="69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28" Dec 03 12:21:22 crc kubenswrapper[4849]: E1203 12:21:22.992451 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.001574 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.014359 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c8658902
8e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.015242 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.015272 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.015281 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.015295 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.015303 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:23Z","lastTransitionTime":"2025-12-03T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.023209 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.031012 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.038381 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.045837 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.054174 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda8
6e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.061402 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.073323 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics
-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"uuid == {78f6184b-c7cf-436d-8cbb-4b31f8af75e8}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:21:21.557494 6260 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nF1203 12:21:21.557904 6260 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:21.557869 6260 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer 
Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/ku
bernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.079310 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.086607 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.094060 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.101600 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.109342 4849 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.115989 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.116953 4849 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.116980 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.116997 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.117144 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.117178 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:23Z","lastTransitionTime":"2025-12-03T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.219737 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.219770 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.219779 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.219793 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.219801 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:23Z","lastTransitionTime":"2025-12-03T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.321608 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.321662 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.321673 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.321689 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.321698 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:23Z","lastTransitionTime":"2025-12-03T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.423411 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.423440 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.423448 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.423460 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.423468 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:23Z","lastTransitionTime":"2025-12-03T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.525183 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.525211 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.525220 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.525230 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.525239 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:23Z","lastTransitionTime":"2025-12-03T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.626719 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.626753 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.626762 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.626775 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.626784 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:23Z","lastTransitionTime":"2025-12-03T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.728863 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.728903 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.728913 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.728927 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.728938 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:23Z","lastTransitionTime":"2025-12-03T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.830869 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.830914 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.830923 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.830933 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.830940 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:23Z","lastTransitionTime":"2025-12-03T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.856359 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.856440 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:23 crc kubenswrapper[4849]: E1203 12:21:23.856557 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:23 crc kubenswrapper[4849]: E1203 12:21:23.856657 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.856665 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:23 crc kubenswrapper[4849]: E1203 12:21:23.856726 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.865581 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.873757 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.881908 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.888632 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.900399 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"uuid == {78f6184b-c7cf-436d-8cbb-4b31f8af75e8}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:21:21.557494 6260 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nF1203 12:21:21.557904 6260 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:21.557869 6260 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.908073 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.915616 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.922698 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.931741 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.932577 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.932607 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.932616 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.932628 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.932638 4849 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:23Z","lastTransitionTime":"2025-12-03T12:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.945542 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7
c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.958154 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.966163 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.973830 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.981503 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:23 crc kubenswrapper[4849]: I1203 12:21:23.990319 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda8
6e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.034854 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.034886 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.034895 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.034907 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.034915 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:24Z","lastTransitionTime":"2025-12-03T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.107624 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv"] Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.108168 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.110110 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.110214 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.118036 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.126703 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.134981 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.136390 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.136419 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.136427 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.136441 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.136449 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:24Z","lastTransitionTime":"2025-12-03T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.145606 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:
21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.153922 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.161972 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.169681 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.177135 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.191672 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"uuid == {78f6184b-c7cf-436d-8cbb-4b31f8af75e8}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:21:21.557494 6260 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nF1203 12:21:21.557904 6260 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:21.557869 6260 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.198394 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.205995 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.213183 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.222151 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.235372 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c8658902
8e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.240709 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.240808 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.240832 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.240876 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.240895 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:24Z","lastTransitionTime":"2025-12-03T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.241593 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/108d7f33-ea04-49bb-946b-fb0041be7b1e-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dbqpv\" (UID: \"108d7f33-ea04-49bb-946b-fb0041be7b1e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.241660 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/108d7f33-ea04-49bb-946b-fb0041be7b1e-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dbqpv\" (UID: \"108d7f33-ea04-49bb-946b-fb0041be7b1e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.241714 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8pwc\" (UniqueName: \"kubernetes.io/projected/108d7f33-ea04-49bb-946b-fb0041be7b1e-kube-api-access-b8pwc\") pod \"ovnkube-control-plane-749d76644c-dbqpv\" (UID: \"108d7f33-ea04-49bb-946b-fb0041be7b1e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.241914 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/108d7f33-ea04-49bb-946b-fb0041be7b1e-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dbqpv\" (UID: \"108d7f33-ea04-49bb-946b-fb0041be7b1e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.248532 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.255343 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.342601 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/108d7f33-ea04-49bb-946b-fb0041be7b1e-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dbqpv\" (UID: \"108d7f33-ea04-49bb-946b-fb0041be7b1e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.342722 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/108d7f33-ea04-49bb-946b-fb0041be7b1e-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dbqpv\" (UID: \"108d7f33-ea04-49bb-946b-fb0041be7b1e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.342774 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/108d7f33-ea04-49bb-946b-fb0041be7b1e-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dbqpv\" (UID: \"108d7f33-ea04-49bb-946b-fb0041be7b1e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.343143 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8pwc\" (UniqueName: \"kubernetes.io/projected/108d7f33-ea04-49bb-946b-fb0041be7b1e-kube-api-access-b8pwc\") pod \"ovnkube-control-plane-749d76644c-dbqpv\" (UID: \"108d7f33-ea04-49bb-946b-fb0041be7b1e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.343839 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/108d7f33-ea04-49bb-946b-fb0041be7b1e-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dbqpv\" (UID: \"108d7f33-ea04-49bb-946b-fb0041be7b1e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.343850 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/108d7f33-ea04-49bb-946b-fb0041be7b1e-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dbqpv\" (UID: \"108d7f33-ea04-49bb-946b-fb0041be7b1e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.345895 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.345921 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.345929 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.345942 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.345951 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:24Z","lastTransitionTime":"2025-12-03T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.347369 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/108d7f33-ea04-49bb-946b-fb0041be7b1e-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dbqpv\" (UID: \"108d7f33-ea04-49bb-946b-fb0041be7b1e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.357830 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8pwc\" (UniqueName: \"kubernetes.io/projected/108d7f33-ea04-49bb-946b-fb0041be7b1e-kube-api-access-b8pwc\") pod \"ovnkube-control-plane-749d76644c-dbqpv\" (UID: \"108d7f33-ea04-49bb-946b-fb0041be7b1e\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.417410 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.447905 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.447941 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.447950 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.447966 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.447975 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:24Z","lastTransitionTime":"2025-12-03T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.550253 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.550279 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.550288 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.550302 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.550311 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:24Z","lastTransitionTime":"2025-12-03T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.651992 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.652046 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.652055 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.652069 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.652077 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:24Z","lastTransitionTime":"2025-12-03T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.753767 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.753802 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.753810 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.753824 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.753834 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:24Z","lastTransitionTime":"2025-12-03T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.855782 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.855812 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.855820 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.855832 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.855840 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:24Z","lastTransitionTime":"2025-12-03T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.957730 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.957761 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.957768 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.957778 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.957788 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:24Z","lastTransitionTime":"2025-12-03T12:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.997968 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" event={"ID":"108d7f33-ea04-49bb-946b-fb0041be7b1e","Type":"ContainerStarted","Data":"18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592"} Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.998003 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" event={"ID":"108d7f33-ea04-49bb-946b-fb0041be7b1e","Type":"ContainerStarted","Data":"eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e"} Dec 03 12:21:24 crc kubenswrapper[4849]: I1203 12:21:24.998013 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" event={"ID":"108d7f33-ea04-49bb-946b-fb0041be7b1e","Type":"ContainerStarted","Data":"faac9d751ca2fe5da496b2c2c98c0c0b8edbb8cef9c79652bef43a259463a181"} Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.009634 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.028422 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.044120 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.058635 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.059756 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.059780 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.059788 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.059799 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.059808 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:25Z","lastTransitionTime":"2025-12-03T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.068396 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.076993 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.089486 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"uuid == {78f6184b-c7cf-436d-8cbb-4b31f8af75e8}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:21:21.557494 6260 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nF1203 12:21:21.557904 6260 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:21.557869 6260 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.096694 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.104597 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.112759 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.121134 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.128394 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.137757 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.144939 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 
12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.152660 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.161199 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.161229 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.161239 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.161250 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.161258 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:25Z","lastTransitionTime":"2025-12-03T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.167354 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.263196 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.263228 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.263236 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.263248 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.263257 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:25Z","lastTransitionTime":"2025-12-03T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.365261 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.365303 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.365314 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.365329 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.365338 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:25Z","lastTransitionTime":"2025-12-03T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.466678 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.466729 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.466738 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.466752 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.466761 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:25Z","lastTransitionTime":"2025-12-03T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.517380 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-hjzzk"] Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.517727 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:25 crc kubenswrapper[4849]: E1203 12:21:25.517779 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.525752 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.534139 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.543671 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.552535 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.560345 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.568421 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.568586 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.568615 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.568623 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.568638 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.568662 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:25Z","lastTransitionTime":"2025-12-03T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.575525 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.582134 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.593655 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"uuid == {78f6184b-c7cf-436d-8cbb-4b31f8af75e8}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:21:21.557494 6260 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nF1203 12:21:21.557904 6260 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:21.557869 6260 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.600510 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.607371 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.613658 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.621049 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.633307 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.642198 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.649325 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 
12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.654695 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs\") pod \"network-metrics-daemon-hjzzk\" (UID: \"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.654741 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8htm\" (UniqueName: \"kubernetes.io/projected/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-kube-api-access-c8htm\") pod \"network-metrics-daemon-hjzzk\" (UID: \"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.657782 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\
\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:25Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.669968 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.670000 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.670010 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.670035 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.670046 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:25Z","lastTransitionTime":"2025-12-03T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.755846 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs\") pod \"network-metrics-daemon-hjzzk\" (UID: \"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.755887 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8htm\" (UniqueName: \"kubernetes.io/projected/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-kube-api-access-c8htm\") pod \"network-metrics-daemon-hjzzk\" (UID: \"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:25 crc kubenswrapper[4849]: E1203 12:21:25.756008 4849 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:25 crc kubenswrapper[4849]: E1203 12:21:25.756088 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs podName:1cebc8f9-e598-45ce-aed1-4fbd7df7fb86 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:26.256071822 +0000 UTC m=+32.717919605 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs") pod "network-metrics-daemon-hjzzk" (UID: "1cebc8f9-e598-45ce-aed1-4fbd7df7fb86") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.767958 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8htm\" (UniqueName: \"kubernetes.io/projected/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-kube-api-access-c8htm\") pod \"network-metrics-daemon-hjzzk\" (UID: \"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.771747 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.771784 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.771795 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.771809 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.771819 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:25Z","lastTransitionTime":"2025-12-03T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.855584 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:25 crc kubenswrapper[4849]: E1203 12:21:25.855777 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.855678 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:25 crc kubenswrapper[4849]: E1203 12:21:25.855942 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.855624 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:25 crc kubenswrapper[4849]: E1203 12:21:25.856124 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.873686 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.873808 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.873885 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.873980 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.874062 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:25Z","lastTransitionTime":"2025-12-03T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.975513 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.975545 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.975553 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.975564 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:25 crc kubenswrapper[4849]: I1203 12:21:25.975572 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:25Z","lastTransitionTime":"2025-12-03T12:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.077510 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.077550 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.077559 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.077571 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.077581 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:26Z","lastTransitionTime":"2025-12-03T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.179260 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.179291 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.179302 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.179314 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.179321 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:26Z","lastTransitionTime":"2025-12-03T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.260043 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs\") pod \"network-metrics-daemon-hjzzk\" (UID: \"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:26 crc kubenswrapper[4849]: E1203 12:21:26.260204 4849 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:26 crc kubenswrapper[4849]: E1203 12:21:26.260254 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs podName:1cebc8f9-e598-45ce-aed1-4fbd7df7fb86 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:27.260241915 +0000 UTC m=+33.722089698 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs") pod "network-metrics-daemon-hjzzk" (UID: "1cebc8f9-e598-45ce-aed1-4fbd7df7fb86") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.283663 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.283692 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.283701 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.283712 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.283726 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:26Z","lastTransitionTime":"2025-12-03T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.385846 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.385880 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.385911 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.385923 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.385931 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:26Z","lastTransitionTime":"2025-12-03T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.487704 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.487732 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.487740 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.487751 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.487760 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:26Z","lastTransitionTime":"2025-12-03T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.589624 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.589674 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.589683 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.589693 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.589701 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:26Z","lastTransitionTime":"2025-12-03T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.691261 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.691438 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.691446 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.691457 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.691467 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:26Z","lastTransitionTime":"2025-12-03T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.793172 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.793208 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.793216 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.793228 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.793237 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:26Z","lastTransitionTime":"2025-12-03T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.856146 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:26 crc kubenswrapper[4849]: E1203 12:21:26.856240 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.894630 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.894683 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.894692 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.894702 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.894710 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:26Z","lastTransitionTime":"2025-12-03T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.996528 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.996559 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.996567 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.996580 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:26 crc kubenswrapper[4849]: I1203 12:21:26.996588 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:26Z","lastTransitionTime":"2025-12-03T12:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.098211 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.098244 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.098253 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.098265 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.098273 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:27Z","lastTransitionTime":"2025-12-03T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.200015 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.200060 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.200068 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.200079 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.200087 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:27Z","lastTransitionTime":"2025-12-03T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.268920 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs\") pod \"network-metrics-daemon-hjzzk\" (UID: \"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.269060 4849 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.269140 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs podName:1cebc8f9-e598-45ce-aed1-4fbd7df7fb86 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:29.269124111 +0000 UTC m=+35.730971894 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs") pod "network-metrics-daemon-hjzzk" (UID: "1cebc8f9-e598-45ce-aed1-4fbd7df7fb86") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.301286 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.301318 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.301327 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.301339 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.301347 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:27Z","lastTransitionTime":"2025-12-03T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.403463 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.403498 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.403508 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.403520 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.403530 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:27Z","lastTransitionTime":"2025-12-03T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.505360 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.505397 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.505405 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.505420 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.505429 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:27Z","lastTransitionTime":"2025-12-03T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.571900 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.571974 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.571992 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.572007 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:21:43.571989315 +0000 UTC m=+50.033837098 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.572086 4849 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.572093 4849 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.572121 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:43.572113619 +0000 UTC m=+50.033961392 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.572133 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:43.572127966 +0000 UTC m=+50.033975749 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.606777 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.606804 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.606812 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.606824 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.606831 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:27Z","lastTransitionTime":"2025-12-03T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.672327 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.672360 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.672443 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.672444 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.672456 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.672466 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.672470 4849 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.672477 4849 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.672510 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:43.67249863 +0000 UTC m=+50.134346413 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.672522 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:43.672517106 +0000 UTC m=+50.134364889 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.708978 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.709005 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.709013 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.709024 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.709044 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:27Z","lastTransitionTime":"2025-12-03T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.810906 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.810935 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.810944 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.810955 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.810963 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:27Z","lastTransitionTime":"2025-12-03T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.855935 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.855935 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.856112 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.856050 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.856020 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:27 crc kubenswrapper[4849]: E1203 12:21:27.856266 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.912602 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.912634 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.912659 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.912673 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:27 crc kubenswrapper[4849]: I1203 12:21:27.912681 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:27Z","lastTransitionTime":"2025-12-03T12:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.014565 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.014601 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.014612 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.014624 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.014632 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:28Z","lastTransitionTime":"2025-12-03T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.116515 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.116545 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.116553 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.116564 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.116572 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:28Z","lastTransitionTime":"2025-12-03T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.218781 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.218812 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.218820 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.218843 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.218853 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:28Z","lastTransitionTime":"2025-12-03T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.320139 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.320176 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.320185 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.320197 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.320207 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:28Z","lastTransitionTime":"2025-12-03T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.421678 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.421706 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.421714 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.421725 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.421734 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:28Z","lastTransitionTime":"2025-12-03T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.523768 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.523801 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.523811 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.523823 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.523832 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:28Z","lastTransitionTime":"2025-12-03T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.625768 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.625797 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.625805 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.625816 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.625824 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:28Z","lastTransitionTime":"2025-12-03T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.727366 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.727398 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.727406 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.727419 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.727427 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:28Z","lastTransitionTime":"2025-12-03T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.829014 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.829060 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.829069 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.829082 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.829091 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:28Z","lastTransitionTime":"2025-12-03T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.855828 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:28 crc kubenswrapper[4849]: E1203 12:21:28.855929 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.930729 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.930762 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.930772 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.930785 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.930795 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:28Z","lastTransitionTime":"2025-12-03T12:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.031863 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.031907 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.031917 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.031932 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.031942 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:29Z","lastTransitionTime":"2025-12-03T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.133336 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.133364 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.133371 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.133383 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.133392 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:29Z","lastTransitionTime":"2025-12-03T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.234994 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.235049 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.235061 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.235073 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.235081 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:29Z","lastTransitionTime":"2025-12-03T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.284190 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs\") pod \"network-metrics-daemon-hjzzk\" (UID: \"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:29 crc kubenswrapper[4849]: E1203 12:21:29.284368 4849 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:29 crc kubenswrapper[4849]: E1203 12:21:29.284564 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs podName:1cebc8f9-e598-45ce-aed1-4fbd7df7fb86 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:33.28454772 +0000 UTC m=+39.746395503 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs") pod "network-metrics-daemon-hjzzk" (UID: "1cebc8f9-e598-45ce-aed1-4fbd7df7fb86") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.336477 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.336506 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.336515 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.336526 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.336535 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:29Z","lastTransitionTime":"2025-12-03T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.438350 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.438381 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.438389 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.438402 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.438410 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:29Z","lastTransitionTime":"2025-12-03T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.540480 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.540507 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.540515 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.540527 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.540537 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:29Z","lastTransitionTime":"2025-12-03T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.642621 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.642671 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.642680 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.642691 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.642700 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:29Z","lastTransitionTime":"2025-12-03T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.744734 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.744756 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.744764 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.744775 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.744783 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:29Z","lastTransitionTime":"2025-12-03T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.846221 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.846262 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.846272 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.846286 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.846295 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:29Z","lastTransitionTime":"2025-12-03T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.856457 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.856492 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:29 crc kubenswrapper[4849]: E1203 12:21:29.856553 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.856466 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:29 crc kubenswrapper[4849]: E1203 12:21:29.856671 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:29 crc kubenswrapper[4849]: E1203 12:21:29.856712 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
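Editor's annotation, not part of the captured log: the same few messages repeat here roughly every hundred milliseconds, the "Recording event message for node" events, the "Node became not ready" condition updates, and the per-pod "no CNI configuration file in /etc/kubernetes/cni/net.d/" sync errors for networking-console-plugin, network-check-source, network-check-target and network-metrics-daemon, differing mainly in timestamps. A small stdlib-only sketch that splits a capture like this on the journald/klog header and tallies the most frequent entry bodies makes the handful of distinct failure modes easier to see; kubelet.log is again a placeholder path.

    import collections
    import re

    LOG_PATH = "kubelet.log"  # placeholder path for this captured excerpt

    # One journald-style header per kubelet entry, e.g.
    # "Dec 03 12:21:28 crc kubenswrapper[4849]: I1203 12:21:28.014565 4849 "
    HEADER = re.compile(
        r"[A-Z][a-z]{2} \d{2} \d{2}:\d{2}:\d{2} \S+ kubenswrapper\[\d+\]: "
        r"[IWE]\d{4} [\d:.]+\s+\d+ "
    )

    with open(LOG_PATH, encoding="utf-8", errors="replace") as f:
        text = f.read()

    # Entries in this capture run together on long physical lines, so split on the
    # header instead of on newlines, then count what is left of each entry.
    bodies = [b.strip().replace("\n", " ") for b in HEADER.split(text) if b.strip()]
    counts = collections.Counter(bodies)

    for message, n in counts.most_common(10):
        print(f"{n:6d}  {message[:120]}")

Entries whose bodies still contain timestamps (such as the inline Ready condition JSON) will not collapse into a single bucket, but the top of the output still surfaces the few recurring messages behind this flood.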
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.947690 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.947811 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.947878 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.947943 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:29 crc kubenswrapper[4849]: I1203 12:21:29.948002 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:29Z","lastTransitionTime":"2025-12-03T12:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.049878 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.049998 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.050132 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.050344 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.050487 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:30Z","lastTransitionTime":"2025-12-03T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.152853 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.152956 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.153023 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.153124 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.153179 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:30Z","lastTransitionTime":"2025-12-03T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.254970 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.255005 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.255013 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.255024 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.255043 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:30Z","lastTransitionTime":"2025-12-03T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.356664 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.356689 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.356697 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.356708 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.356716 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:30Z","lastTransitionTime":"2025-12-03T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.458157 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.458188 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.458195 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.458207 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.458214 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:30Z","lastTransitionTime":"2025-12-03T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.559714 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.559745 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.559754 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.559768 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.559775 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:30Z","lastTransitionTime":"2025-12-03T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.661265 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.661292 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.661301 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.661311 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.661319 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:30Z","lastTransitionTime":"2025-12-03T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.763424 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.763466 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.763476 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.763492 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.763500 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:30Z","lastTransitionTime":"2025-12-03T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.856359 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:30 crc kubenswrapper[4849]: E1203 12:21:30.856444 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.865444 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.865473 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.865481 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.865490 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.865498 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:30Z","lastTransitionTime":"2025-12-03T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.967819 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.967845 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.967852 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.967861 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:30 crc kubenswrapper[4849]: I1203 12:21:30.967868 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:30Z","lastTransitionTime":"2025-12-03T12:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.069439 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.069478 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.069486 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.069498 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.069508 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.171359 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.171384 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.171392 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.171402 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.171410 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.272731 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.272772 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.272780 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.272794 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.272802 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.374430 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.374453 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.374460 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.374470 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.374478 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.476464 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.476490 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.476498 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.476507 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.476514 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.578401 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.578422 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.578429 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.578438 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.578445 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.634695 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.634732 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.634741 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.634756 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.634765 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: E1203 12:21:31.643275 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.645591 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.645624 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
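Editor's annotation, not part of the captured log: the node status patch above is rejected because the node.network-node-identity.openshift.io webhook's serving certificate at https://127.0.0.1:9743 has expired; the error reports that the current time 2025-12-03T12:21:31Z is after what is evidently the certificate's notAfter, 2025-08-24T17:21:41Z. A quick stdlib sketch, using those two timestamps copied verbatim from the entry above, reports how long the certificate had been expired when the patch was attempted.

    from datetime import datetime, timezone

    # Timestamps copied from the webhook failure in the entry above.
    current_time = datetime(2025, 12, 3, 12, 21, 31, tzinfo=timezone.utc)
    not_after = datetime(2025, 8, 24, 17, 21, 41, tzinfo=timezone.utc)

    expired_for = current_time - not_after
    print(f"webhook serving certificate expired {expired_for.days} days "
          f"({expired_for}) before this status patch was attempted")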
event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.645632 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.645659 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.645668 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: E1203 12:21:31.653506 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.655449 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.655477 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.655486 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.655497 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.655505 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: E1203 12:21:31.663251 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.665161 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.665185 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.665194 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.665204 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.665212 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: E1203 12:21:31.672575 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.674520 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.674548 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.674558 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.674569 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.674576 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: E1203 12:21:31.682050 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: E1203 12:21:31.682150 4849 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.683053 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.683082 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.683092 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.683102 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.683109 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.720849 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.732918 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d4114
2b4f0de44cdd0be8f65c7b28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"uuid == {78f6184b-c7cf-436d-8cbb-4b31f8af75e8}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:21:21.557494 6260 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nF1203 12:21:21.557904 6260 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:21.557869 6260 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.739168 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.746579 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.754329 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.761286 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.767924 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.775150 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.781959 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.786813 4849 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.786925 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.786996 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.787077 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.787475 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.789227 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.796864 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.809117 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.817443 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.824344 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 
12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.832079 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.839158 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.846751 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.855634 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.855720 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:31 crc kubenswrapper[4849]: E1203 12:21:31.855743 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.855762 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:31 crc kubenswrapper[4849]: E1203 12:21:31.855826 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:31 crc kubenswrapper[4849]: E1203 12:21:31.855896 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.855890 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f
2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:31Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.889284 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.889305 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.889314 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.889324 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.889332 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.991346 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.991389 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.991398 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.991409 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:31 crc kubenswrapper[4849]: I1203 12:21:31.991419 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:31Z","lastTransitionTime":"2025-12-03T12:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.093229 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.093289 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.093298 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.093311 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.093319 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:32Z","lastTransitionTime":"2025-12-03T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.195288 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.195326 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.195334 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.195346 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.195356 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:32Z","lastTransitionTime":"2025-12-03T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.297063 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.297099 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.297107 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.297120 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.297129 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:32Z","lastTransitionTime":"2025-12-03T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.398191 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.398230 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.398239 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.398251 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.398259 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:32Z","lastTransitionTime":"2025-12-03T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.499498 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.499532 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.499542 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.499554 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.499563 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:32Z","lastTransitionTime":"2025-12-03T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.601072 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.601112 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.601120 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.601135 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.601145 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:32Z","lastTransitionTime":"2025-12-03T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.702596 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.702663 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.702675 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.702689 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.702699 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:32Z","lastTransitionTime":"2025-12-03T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.804209 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.804246 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.804256 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.804269 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.804278 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:32Z","lastTransitionTime":"2025-12-03T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.855840 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:32 crc kubenswrapper[4849]: E1203 12:21:32.855937 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.905676 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.905707 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.905715 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.905726 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:32 crc kubenswrapper[4849]: I1203 12:21:32.905734 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:32Z","lastTransitionTime":"2025-12-03T12:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.007589 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.007659 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.007671 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.007685 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.007694 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:33Z","lastTransitionTime":"2025-12-03T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.109710 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.109735 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.109742 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.109751 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.109759 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:33Z","lastTransitionTime":"2025-12-03T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.211331 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.211371 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.211381 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.211393 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.211402 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:33Z","lastTransitionTime":"2025-12-03T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.313637 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.313697 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.313705 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.313718 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.313727 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:33Z","lastTransitionTime":"2025-12-03T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.317008 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs\") pod \"network-metrics-daemon-hjzzk\" (UID: \"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:33 crc kubenswrapper[4849]: E1203 12:21:33.317118 4849 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:33 crc kubenswrapper[4849]: E1203 12:21:33.317171 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs podName:1cebc8f9-e598-45ce-aed1-4fbd7df7fb86 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:41.317156581 +0000 UTC m=+47.779004374 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs") pod "network-metrics-daemon-hjzzk" (UID: "1cebc8f9-e598-45ce-aed1-4fbd7df7fb86") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.415610 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.415673 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.415684 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.415697 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.415705 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:33Z","lastTransitionTime":"2025-12-03T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.517135 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.517172 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.517180 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.517192 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.517200 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:33Z","lastTransitionTime":"2025-12-03T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.618595 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.618631 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.618693 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.618732 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.618742 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:33Z","lastTransitionTime":"2025-12-03T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.720562 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.720591 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.720599 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.720611 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.720619 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:33Z","lastTransitionTime":"2025-12-03T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.822030 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.822071 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.822080 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.822089 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.822097 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:33Z","lastTransitionTime":"2025-12-03T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.855630 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.855716 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:33 crc kubenswrapper[4849]: E1203 12:21:33.855877 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.855887 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:33 crc kubenswrapper[4849]: E1203 12:21:33.855970 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:33 crc kubenswrapper[4849]: E1203 12:21:33.856008 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.864793 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.872369 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.879816 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.885791 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.896957 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"uuid == {78f6184b-c7cf-436d-8cbb-4b31f8af75e8}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:21:21.557494 6260 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nF1203 12:21:21.557904 6260 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:21.557869 6260 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.904276 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.912540 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.918973 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.923965 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.923997 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.924007 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.924020 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.924030 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:33Z","lastTransitionTime":"2025-12-03T12:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.926442 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.938566 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c8658902
8e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.946839 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2
fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 
2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.953820 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.961350 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.968628 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.976626 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.984906 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d0
9034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\
"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:33 crc kubenswrapper[4849]: I1203 12:21:33.992552 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:33Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:34 crc 
kubenswrapper[4849]: I1203 12:21:34.025841 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.025865 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.025872 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.025883 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.025892 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:34Z","lastTransitionTime":"2025-12-03T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.127877 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.127906 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.127913 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.127923 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.127931 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:34Z","lastTransitionTime":"2025-12-03T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.229447 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.229472 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.229480 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.229490 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.229497 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:34Z","lastTransitionTime":"2025-12-03T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.332185 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.332213 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.332228 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.332240 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.332249 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:34Z","lastTransitionTime":"2025-12-03T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.433951 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.433981 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.433989 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.433998 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.434006 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:34Z","lastTransitionTime":"2025-12-03T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.535616 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.535671 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.535679 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.535691 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.535699 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:34Z","lastTransitionTime":"2025-12-03T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.637361 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.637393 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.637401 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.637412 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.637420 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:34Z","lastTransitionTime":"2025-12-03T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.739383 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.739409 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.739419 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.739430 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.739437 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:34Z","lastTransitionTime":"2025-12-03T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.840771 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.840796 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.840804 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.840813 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.840822 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:34Z","lastTransitionTime":"2025-12-03T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.856170 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:34 crc kubenswrapper[4849]: E1203 12:21:34.856272 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.943163 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.943195 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.943203 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.943215 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:34 crc kubenswrapper[4849]: I1203 12:21:34.943223 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:34Z","lastTransitionTime":"2025-12-03T12:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.045360 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.045384 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.045392 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.045401 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.045410 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:35Z","lastTransitionTime":"2025-12-03T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.147343 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.147370 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.147378 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.147389 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.147396 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:35Z","lastTransitionTime":"2025-12-03T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.249211 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.249237 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.249245 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.249254 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.249261 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:35Z","lastTransitionTime":"2025-12-03T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.351389 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.351415 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.351423 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.351431 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.351438 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:35Z","lastTransitionTime":"2025-12-03T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.452758 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.452797 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.452806 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.452820 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.452830 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:35Z","lastTransitionTime":"2025-12-03T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.554088 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.554121 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.554130 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.554141 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.554149 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:35Z","lastTransitionTime":"2025-12-03T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.655950 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.655976 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.655984 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.655993 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.656002 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:35Z","lastTransitionTime":"2025-12-03T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.757907 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.757940 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.757948 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.757960 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.757968 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:35Z","lastTransitionTime":"2025-12-03T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.855803 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.855845 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:35 crc kubenswrapper[4849]: E1203 12:21:35.855902 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.855954 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:35 crc kubenswrapper[4849]: E1203 12:21:35.856017 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:35 crc kubenswrapper[4849]: E1203 12:21:35.856101 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.859510 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.859536 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.859545 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.859555 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.859563 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:35Z","lastTransitionTime":"2025-12-03T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.961554 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.961581 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.961589 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.961599 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:35 crc kubenswrapper[4849]: I1203 12:21:35.961607 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:35Z","lastTransitionTime":"2025-12-03T12:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.062770 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.062796 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.062804 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.062814 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.062821 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:36Z","lastTransitionTime":"2025-12-03T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.163937 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.163977 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.163985 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.163997 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.164007 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:36Z","lastTransitionTime":"2025-12-03T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.265906 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.265951 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.265959 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.265975 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.265984 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:36Z","lastTransitionTime":"2025-12-03T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.367697 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.367725 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.367733 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.367744 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.367751 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:36Z","lastTransitionTime":"2025-12-03T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.468897 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.468937 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.468946 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.468959 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.468967 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:36Z","lastTransitionTime":"2025-12-03T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.570545 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.570576 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.570587 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.570598 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.570608 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:36Z","lastTransitionTime":"2025-12-03T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.672483 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.672527 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.672536 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.672549 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.672556 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:36Z","lastTransitionTime":"2025-12-03T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.774693 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.774726 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.774735 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.774747 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.774755 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:36Z","lastTransitionTime":"2025-12-03T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.855677 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:36 crc kubenswrapper[4849]: E1203 12:21:36.855784 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.876141 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.876166 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.876177 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.876187 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.876195 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:36Z","lastTransitionTime":"2025-12-03T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.978024 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.978062 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.978070 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.978081 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:36 crc kubenswrapper[4849]: I1203 12:21:36.978088 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:36Z","lastTransitionTime":"2025-12-03T12:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.079568 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.079599 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.079608 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.079618 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.079624 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:37Z","lastTransitionTime":"2025-12-03T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.180891 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.180920 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.180928 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.180939 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.180947 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:37Z","lastTransitionTime":"2025-12-03T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.283033 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.283081 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.283090 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.283103 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.283111 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:37Z","lastTransitionTime":"2025-12-03T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.384354 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.384405 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.384415 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.384427 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.384434 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:37Z","lastTransitionTime":"2025-12-03T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.486375 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.486399 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.486407 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.486417 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.486424 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:37Z","lastTransitionTime":"2025-12-03T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.588062 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.588087 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.588095 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.588121 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.588129 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:37Z","lastTransitionTime":"2025-12-03T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.689791 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.689836 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.689845 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.689855 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.689862 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:37Z","lastTransitionTime":"2025-12-03T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.791630 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.791665 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.791674 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.791683 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.791690 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:37Z","lastTransitionTime":"2025-12-03T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.855872 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.856143 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.856179 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:37 crc kubenswrapper[4849]: E1203 12:21:37.856248 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.856290 4849 scope.go:117] "RemoveContainer" containerID="69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28" Dec 03 12:21:37 crc kubenswrapper[4849]: E1203 12:21:37.856334 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:37 crc kubenswrapper[4849]: E1203 12:21:37.856387 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.893411 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.893447 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.893455 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.893469 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.893476 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:37Z","lastTransitionTime":"2025-12-03T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.995019 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.995063 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.995073 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.995085 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:37 crc kubenswrapper[4849]: I1203 12:21:37.995093 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:37Z","lastTransitionTime":"2025-12-03T12:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.024853 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/1.log" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.026501 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerStarted","Data":"ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22"} Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.026870 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.035931 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.043492 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.052559 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d0
9034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\
"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.061656 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc 
kubenswrapper[4849]: I1203 12:21:38.073918 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.087457 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.096722 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.096756 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.096765 4849 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.096778 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.096788 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:38Z","lastTransitionTime":"2025-12-03T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.097707 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.110406 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.132013 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"uuid == {78f6184b-c7cf-436d-8cbb-4b31f8af75e8}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:21:21.557494 6260 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nF1203 12:21:21.557904 6260 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:21.557869 6260 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service 
k8s.ovn.org/owner:openshift-operator-lifecycle-manager/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.139843 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.147339 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.154438 4849 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.162776 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.174910 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c8658902
8e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.184021 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2
fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 
2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.191857 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.198389 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.198421 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.198431 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.198448 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.198457 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:38Z","lastTransitionTime":"2025-12-03T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.199695 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.300613 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.300672 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.300681 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 
12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.300692 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.300700 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:38Z","lastTransitionTime":"2025-12-03T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.403041 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.403079 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.403088 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.403099 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.403105 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:38Z","lastTransitionTime":"2025-12-03T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.504958 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.504995 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.505003 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.505016 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.505025 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:38Z","lastTransitionTime":"2025-12-03T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.606537 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.606569 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.606577 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.606589 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.606598 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:38Z","lastTransitionTime":"2025-12-03T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.708315 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.708352 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.708360 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.708372 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.708380 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:38Z","lastTransitionTime":"2025-12-03T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.810116 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.810167 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.810177 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.810191 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.810200 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:38Z","lastTransitionTime":"2025-12-03T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.855676 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:38 crc kubenswrapper[4849]: E1203 12:21:38.855788 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.911490 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.911531 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.911546 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.911559 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:38 crc kubenswrapper[4849]: I1203 12:21:38.911569 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:38Z","lastTransitionTime":"2025-12-03T12:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.013788 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.013822 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.013831 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.013842 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.013850 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:39Z","lastTransitionTime":"2025-12-03T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.029851 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/2.log" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.030357 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/1.log" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.032130 4849 generic.go:334] "Generic (PLEG): container finished" podID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerID="ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22" exitCode=1 Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.032156 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22"} Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.032192 4849 scope.go:117] "RemoveContainer" containerID="69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.032613 4849 scope.go:117] "RemoveContainer" containerID="ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22" Dec 03 12:21:39 crc kubenswrapper[4849]: E1203 12:21:39.032752 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.040700 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.048631 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.056323 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-l
ib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.065934 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda8
6e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.073960 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.082752 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.090533 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.097160 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.108768 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://69ad0a5b63cd237ff2ac5fe2468c45a2e69d41142b4f0de44cdd0be8f65c7b28\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:21Z\\\",\\\"message\\\":\\\"uuid == {78f6184b-c7cf-436d-8cbb-4b31f8af75e8}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1203 12:21:21.557494 6260 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-cluster-version/cluster-version-operator\\\\\\\"}\\\\nF1203 12:21:21.557904 6260 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:21Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:21.557869 6260 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:38Z\\\",\\\"message\\\":\\\"ed to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:38.438458 6502 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 12:21:38.438082 6502 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1203 12:21:38.438471 6502 obj_retry.go:386] Retry successful for 
*v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1203 12:21:38.438476 6502 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1203 12:21:38.438340 6502 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\
":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.115323 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.115347 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.115355 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.115366 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.115375 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:39Z","lastTransitionTime":"2025-12-03T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.115949 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.123660 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.130576 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.137110 4849 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.144794 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.156880 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.165145 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.172440 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:39Z is after 2025-08-24T17:21:41Z" Dec 03 
12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.216918 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.216950 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.216959 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.216971 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.216980 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:39Z","lastTransitionTime":"2025-12-03T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.319099 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.319319 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.319394 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.319460 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.319529 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:39Z","lastTransitionTime":"2025-12-03T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.420892 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.420924 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.420933 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.420946 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.420954 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:39Z","lastTransitionTime":"2025-12-03T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.523292 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.523439 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.523503 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.523560 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.523607 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:39Z","lastTransitionTime":"2025-12-03T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.625482 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.625514 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.625522 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.625532 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.625541 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:39Z","lastTransitionTime":"2025-12-03T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.727417 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.727452 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.727461 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.727474 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.727485 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:39Z","lastTransitionTime":"2025-12-03T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.829564 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.829609 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.829620 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.829635 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.829661 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:39Z","lastTransitionTime":"2025-12-03T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.855899 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.855934 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.856073 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:39 crc kubenswrapper[4849]: E1203 12:21:39.856214 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:39 crc kubenswrapper[4849]: E1203 12:21:39.856282 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:39 crc kubenswrapper[4849]: E1203 12:21:39.856344 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.934230 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.934267 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.934275 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.934287 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:39 crc kubenswrapper[4849]: I1203 12:21:39.934295 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:39Z","lastTransitionTime":"2025-12-03T12:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.035415 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.035440 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.035448 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.035476 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.035486 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:40Z","lastTransitionTime":"2025-12-03T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.035683 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/2.log" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.037923 4849 scope.go:117] "RemoveContainer" containerID="ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22" Dec 03 12:21:40 crc kubenswrapper[4849]: E1203 12:21:40.038043 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.046882 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.054412 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.061904 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.070340 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.078277 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.085909 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.093261 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.099402 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.110692 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:38Z\\\",\\\"message\\\":\\\"ed to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:38.438458 6502 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 12:21:38.438082 6502 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1203 12:21:38.438471 6502 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1203 12:21:38.438476 6502 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1203 12:21:38.438340 6502 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.117143 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.124601 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.131337 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.137027 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.137067 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.137077 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.137088 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.137097 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:40Z","lastTransitionTime":"2025-12-03T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.138110 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.146230 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.158087 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.166289 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.174020 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:40Z is after 2025-08-24T17:21:41Z" Dec 03 
12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.239272 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.239302 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.239311 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.239325 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.239333 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:40Z","lastTransitionTime":"2025-12-03T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.341584 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.341621 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.341633 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.341665 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.341674 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:40Z","lastTransitionTime":"2025-12-03T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.442861 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.442889 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.442897 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.442908 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.442915 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:40Z","lastTransitionTime":"2025-12-03T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.544381 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.544409 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.544417 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.544429 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.544441 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:40Z","lastTransitionTime":"2025-12-03T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.645997 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.646026 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.646035 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.646058 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.646066 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:40Z","lastTransitionTime":"2025-12-03T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.747961 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.748005 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.748012 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.748025 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.748035 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:40Z","lastTransitionTime":"2025-12-03T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.849510 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.849538 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.849546 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.849558 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.849566 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:40Z","lastTransitionTime":"2025-12-03T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.855993 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:40 crc kubenswrapper[4849]: E1203 12:21:40.856108 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.951832 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.951882 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.951891 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.951903 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:40 crc kubenswrapper[4849]: I1203 12:21:40.951911 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:40Z","lastTransitionTime":"2025-12-03T12:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.053517 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.053551 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.053560 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.053573 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.053582 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:41Z","lastTransitionTime":"2025-12-03T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.154996 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.155026 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.155035 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.155055 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.155063 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:41Z","lastTransitionTime":"2025-12-03T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.256684 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.256716 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.256724 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.256736 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.256744 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:41Z","lastTransitionTime":"2025-12-03T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.358775 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.358803 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.358811 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.358823 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.358832 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:41Z","lastTransitionTime":"2025-12-03T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.379076 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs\") pod \"network-metrics-daemon-hjzzk\" (UID: \"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:41 crc kubenswrapper[4849]: E1203 12:21:41.379187 4849 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:41 crc kubenswrapper[4849]: E1203 12:21:41.379232 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs podName:1cebc8f9-e598-45ce-aed1-4fbd7df7fb86 nodeName:}" failed. No retries permitted until 2025-12-03 12:21:57.379219366 +0000 UTC m=+63.841067150 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs") pod "network-metrics-daemon-hjzzk" (UID: "1cebc8f9-e598-45ce-aed1-4fbd7df7fb86") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.460266 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.460691 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.460754 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.460808 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.460857 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:41Z","lastTransitionTime":"2025-12-03T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.562382 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.562406 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.562415 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.562425 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.562432 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:41Z","lastTransitionTime":"2025-12-03T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.664871 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.664903 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.664923 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.664937 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.664947 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:41Z","lastTransitionTime":"2025-12-03T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.767312 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.767344 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.767352 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.767363 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.767371 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:41Z","lastTransitionTime":"2025-12-03T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.856385 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.856408 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.856423 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:41 crc kubenswrapper[4849]: E1203 12:21:41.856487 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:41 crc kubenswrapper[4849]: E1203 12:21:41.856545 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:41 crc kubenswrapper[4849]: E1203 12:21:41.856597 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.868437 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.868481 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.868493 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.868507 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.868515 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:41Z","lastTransitionTime":"2025-12-03T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.970464 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.970495 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.970502 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.970513 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:41 crc kubenswrapper[4849]: I1203 12:21:41.970523 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:41Z","lastTransitionTime":"2025-12-03T12:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.000423 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.000455 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.000466 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.000477 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.000485 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: E1203 12:21:42.008852 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.011092 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.011118 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.011126 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.011137 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.011146 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: E1203 12:21:42.019031 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.021017 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.021045 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.021063 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.021072 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.021080 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: E1203 12:21:42.028419 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.030389 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.030417 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.030425 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.030435 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.030442 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: E1203 12:21:42.037928 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.039881 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.039908 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.039917 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.039927 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.039934 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: E1203 12:21:42.047633 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:42Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:42 crc kubenswrapper[4849]: E1203 12:21:42.047775 4849 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.072476 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.072501 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.072509 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.072520 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.072529 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.174237 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.174271 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.174281 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.174293 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.174301 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.275623 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.275665 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.275674 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.275684 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.275692 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.377539 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.377568 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.377575 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.377584 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.377591 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.479585 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.479613 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.479621 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.479631 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.479638 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.581363 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.581417 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.581429 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.581444 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.581453 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.683429 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.683459 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.683466 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.683476 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.683483 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.785125 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.785164 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.785173 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.785185 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.785193 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.856300 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:42 crc kubenswrapper[4849]: E1203 12:21:42.856418 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.887043 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.887087 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.887096 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.887107 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.887115 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.988208 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.988238 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.988248 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.988258 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:42 crc kubenswrapper[4849]: I1203 12:21:42.988265 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:42Z","lastTransitionTime":"2025-12-03T12:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.089502 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.089532 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.089539 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.089550 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.089557 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:43Z","lastTransitionTime":"2025-12-03T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.191067 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.191095 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.191104 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.191117 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.191124 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:43Z","lastTransitionTime":"2025-12-03T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.292681 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.292706 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.292713 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.292723 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.292730 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:43Z","lastTransitionTime":"2025-12-03T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.394775 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.394803 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.394811 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.394823 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.394830 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:43Z","lastTransitionTime":"2025-12-03T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.496238 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.496278 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.496286 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.496297 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.496304 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:43Z","lastTransitionTime":"2025-12-03T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.597914 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.597947 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.597955 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.597965 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.597972 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:43Z","lastTransitionTime":"2025-12-03T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.599332 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.599451 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.599468 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:22:15.599452608 +0000 UTC m=+82.061300392 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.599598 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.599514 4849 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.599808 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:22:15.599794702 +0000 UTC m=+82.061642485 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.599689 4849 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.599904 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:22:15.59989465 +0000 UTC m=+82.061742433 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.699849 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.699966 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.699946 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.700015 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.700023 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.700037 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.700062 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.700072 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:43Z","lastTransitionTime":"2025-12-03T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.700027 4849 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.700166 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.700180 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.700188 4849 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.700188 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:22:15.700176828 +0000 UTC m=+82.162024611 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.700222 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:22:15.700214569 +0000 UTC m=+82.162062352 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.700348 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.801418 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.801462 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.801471 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.801482 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.801490 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:43Z","lastTransitionTime":"2025-12-03T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.856229 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.856278 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.856348 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.856402 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.856523 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:43 crc kubenswrapper[4849]: E1203 12:21:43.856707 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.882223 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.895552 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.903227 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.903327 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.903396 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.903494 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 
12:21:43.903574 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:43Z","lastTransitionTime":"2025-12-03T12:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.907617 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11
\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.922704 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e
087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.931469 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.940487 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:43Z is after 2025-08-24T17:21:41Z" Dec 03 
12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.949832 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.958225 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"sys
tem-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.967168 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.975462 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.982957 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.991122 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:43 crc kubenswrapper[4849]: I1203 12:21:43.999368 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:43Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.004775 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.004800 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.004820 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:44 crc 
kubenswrapper[4849]: I1203 12:21:44.004832 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.004840 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:44Z","lastTransitionTime":"2025-12-03T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.006091 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:44Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.018777 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:38Z\\\",\\\"message\\\":\\\"ed to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:38.438458 6502 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 12:21:38.438082 6502 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1203 12:21:38.438471 6502 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1203 12:21:38.438476 6502 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1203 12:21:38.438340 6502 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:44Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.025436 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:44Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.033452 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:44Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.106862 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.106915 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.106926 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.106941 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.106949 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:44Z","lastTransitionTime":"2025-12-03T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.208683 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.208717 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.208725 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.208736 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.208745 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:44Z","lastTransitionTime":"2025-12-03T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.310434 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.310479 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.310489 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.310502 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.310511 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:44Z","lastTransitionTime":"2025-12-03T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.412815 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.412850 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.412858 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.412871 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.412880 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:44Z","lastTransitionTime":"2025-12-03T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.514949 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.514982 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.514992 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.515005 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.515013 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:44Z","lastTransitionTime":"2025-12-03T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.616699 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.616734 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.616743 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.616756 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.616764 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:44Z","lastTransitionTime":"2025-12-03T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.719190 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.719222 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.719230 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.719243 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.719251 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:44Z","lastTransitionTime":"2025-12-03T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.820803 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.820829 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.820837 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.820846 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.820854 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:44Z","lastTransitionTime":"2025-12-03T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.856527 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:44 crc kubenswrapper[4849]: E1203 12:21:44.856606 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.922528 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.922566 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.922576 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.922586 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:44 crc kubenswrapper[4849]: I1203 12:21:44.922594 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:44Z","lastTransitionTime":"2025-12-03T12:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.024707 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.024741 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.024749 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.024762 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.024770 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:45Z","lastTransitionTime":"2025-12-03T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.126831 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.126857 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.126865 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.126875 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.126881 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:45Z","lastTransitionTime":"2025-12-03T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.228660 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.228691 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.228699 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.228710 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.228720 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:45Z","lastTransitionTime":"2025-12-03T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.329886 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.329916 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.329923 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.329934 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.329942 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:45Z","lastTransitionTime":"2025-12-03T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.432148 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.432180 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.432190 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.432201 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.432210 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:45Z","lastTransitionTime":"2025-12-03T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.533824 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.533855 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.533863 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.533875 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.533886 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:45Z","lastTransitionTime":"2025-12-03T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.635732 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.635760 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.635768 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.635780 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.635789 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:45Z","lastTransitionTime":"2025-12-03T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.737387 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.737425 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.737433 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.737446 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.737454 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:45Z","lastTransitionTime":"2025-12-03T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.839445 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.839483 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.839492 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.839507 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.839516 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:45Z","lastTransitionTime":"2025-12-03T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.855892 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.855938 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:45 crc kubenswrapper[4849]: E1203 12:21:45.855978 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:45 crc kubenswrapper[4849]: E1203 12:21:45.856115 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.856311 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:45 crc kubenswrapper[4849]: E1203 12:21:45.856391 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.941616 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.941656 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.941666 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.941677 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:45 crc kubenswrapper[4849]: I1203 12:21:45.941687 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:45Z","lastTransitionTime":"2025-12-03T12:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.043901 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.043933 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.043942 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.043953 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.043961 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:46Z","lastTransitionTime":"2025-12-03T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.145265 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.145293 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.145301 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.145312 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.145321 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:46Z","lastTransitionTime":"2025-12-03T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.247130 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.247152 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.247160 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.247169 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.247201 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:46Z","lastTransitionTime":"2025-12-03T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.348685 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.348711 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.348720 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.348743 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.348753 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:46Z","lastTransitionTime":"2025-12-03T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.450547 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.450576 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.450584 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.450596 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.450604 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:46Z","lastTransitionTime":"2025-12-03T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.552301 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.552324 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.552331 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.552342 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.552349 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:46Z","lastTransitionTime":"2025-12-03T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.654714 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.654756 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.654765 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.654775 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.654783 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:46Z","lastTransitionTime":"2025-12-03T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.756558 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.756594 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.756614 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.756626 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.756634 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:46Z","lastTransitionTime":"2025-12-03T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.856029 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:46 crc kubenswrapper[4849]: E1203 12:21:46.856155 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.858157 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.858180 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.858188 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.858198 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.858205 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:46Z","lastTransitionTime":"2025-12-03T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.959501 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.959533 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.959541 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.959552 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:46 crc kubenswrapper[4849]: I1203 12:21:46.959560 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:46Z","lastTransitionTime":"2025-12-03T12:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.061464 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.061491 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.061498 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.061508 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.061518 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:47Z","lastTransitionTime":"2025-12-03T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.163208 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.163237 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.163261 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.163274 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.163282 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:47Z","lastTransitionTime":"2025-12-03T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.265393 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.265424 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.265435 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.265447 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.265456 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:47Z","lastTransitionTime":"2025-12-03T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.366668 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.366711 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.366719 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.366731 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.366739 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:47Z","lastTransitionTime":"2025-12-03T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.468207 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.468235 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.468243 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.468253 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.468261 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:47Z","lastTransitionTime":"2025-12-03T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.570003 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.570031 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.570039 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.570053 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.570070 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:47Z","lastTransitionTime":"2025-12-03T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.672013 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.672049 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.672067 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.672080 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.672090 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:47Z","lastTransitionTime":"2025-12-03T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.774204 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.774239 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.774248 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.774259 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.774268 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:47Z","lastTransitionTime":"2025-12-03T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.855854 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.855894 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:47 crc kubenswrapper[4849]: E1203 12:21:47.855949 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.855863 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:47 crc kubenswrapper[4849]: E1203 12:21:47.856051 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:47 crc kubenswrapper[4849]: E1203 12:21:47.856148 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.875664 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.875691 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.875701 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.875712 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.875719 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:47Z","lastTransitionTime":"2025-12-03T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.977880 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.977911 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.977920 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.977931 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:47 crc kubenswrapper[4849]: I1203 12:21:47.977939 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:47Z","lastTransitionTime":"2025-12-03T12:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.079832 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.079860 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.079869 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.079880 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.079897 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:48Z","lastTransitionTime":"2025-12-03T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.101951 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.108039 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.111706 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.119975 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.128255 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.137364 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d0
9034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\
"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.144919 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13
Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.152814 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.160412 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.167378 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.173675 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.181181 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.181214 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.181222 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.181233 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.181242 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:48Z","lastTransitionTime":"2025-12-03T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.185051 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:38Z\\\",\\\"message\\\":\\\"ed to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:38.438458 6502 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 12:21:38.438082 6502 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1203 12:21:38.438471 6502 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1203 12:21:38.438476 6502 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1203 12:21:38.438340 6502 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: 
UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiv
eReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.192245 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.198752 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.205039 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.212666 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.225982 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.234948 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.241555 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:48Z is after 2025-08-24T17:21:41Z" Dec 03 
12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.282988 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.283013 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.283022 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.283033 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.283040 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:48Z","lastTransitionTime":"2025-12-03T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.385123 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.385150 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.385158 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.385168 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.385193 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:48Z","lastTransitionTime":"2025-12-03T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.486615 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.486664 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.486674 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.486687 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.486696 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:48Z","lastTransitionTime":"2025-12-03T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.588464 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.588497 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.588507 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.588520 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.588529 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:48Z","lastTransitionTime":"2025-12-03T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.690450 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.690481 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.690489 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.690501 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.690510 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:48Z","lastTransitionTime":"2025-12-03T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.791685 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.791734 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.791743 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.791754 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.791762 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:48Z","lastTransitionTime":"2025-12-03T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.855750 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:48 crc kubenswrapper[4849]: E1203 12:21:48.855837 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.893465 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.893504 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.893511 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.893525 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.893533 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:48Z","lastTransitionTime":"2025-12-03T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.995819 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.995854 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.995879 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.995894 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:48 crc kubenswrapper[4849]: I1203 12:21:48.995904 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:48Z","lastTransitionTime":"2025-12-03T12:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.098037 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.098089 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.098100 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.098113 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.098124 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:49Z","lastTransitionTime":"2025-12-03T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.200167 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.200200 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.200210 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.200223 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.200232 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:49Z","lastTransitionTime":"2025-12-03T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.301623 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.301665 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.301673 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.301682 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.301690 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:49Z","lastTransitionTime":"2025-12-03T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.403715 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.403758 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.403767 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.403779 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.403787 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:49Z","lastTransitionTime":"2025-12-03T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.505514 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.505546 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.505555 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.505565 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.505574 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:49Z","lastTransitionTime":"2025-12-03T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.607332 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.607356 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.607363 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.607372 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.607379 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:49Z","lastTransitionTime":"2025-12-03T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.709512 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.709549 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.709558 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.709576 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.709585 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:49Z","lastTransitionTime":"2025-12-03T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.810966 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.810997 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.811005 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.811017 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.811025 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:49Z","lastTransitionTime":"2025-12-03T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.856283 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.856317 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:49 crc kubenswrapper[4849]: E1203 12:21:49.856371 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.856290 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:49 crc kubenswrapper[4849]: E1203 12:21:49.856474 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:49 crc kubenswrapper[4849]: E1203 12:21:49.856503 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.912969 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.913021 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.913032 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.913043 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:49 crc kubenswrapper[4849]: I1203 12:21:49.913057 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:49Z","lastTransitionTime":"2025-12-03T12:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.014793 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.014846 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.014858 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.014872 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.014882 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:50Z","lastTransitionTime":"2025-12-03T12:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.116608 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.116670 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.116680 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.116692 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.116700 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:50Z","lastTransitionTime":"2025-12-03T12:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.218493 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.218519 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.218536 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.218548 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.218555 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:50Z","lastTransitionTime":"2025-12-03T12:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.320307 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.320515 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.320524 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.320533 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.320540 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:50Z","lastTransitionTime":"2025-12-03T12:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.422619 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.422677 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.422689 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.422703 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.422713 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:50Z","lastTransitionTime":"2025-12-03T12:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.524300 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.524327 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.524335 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.524346 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.524353 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:50Z","lastTransitionTime":"2025-12-03T12:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.626352 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.626390 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.626399 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.626412 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.626421 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:50Z","lastTransitionTime":"2025-12-03T12:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.728199 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.728237 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.728246 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.728261 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.728270 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:50Z","lastTransitionTime":"2025-12-03T12:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.830671 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.830704 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.830714 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.830727 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.830736 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:50Z","lastTransitionTime":"2025-12-03T12:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.856264 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:50 crc kubenswrapper[4849]: E1203 12:21:50.856356 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.932613 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.932659 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.932669 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.932681 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:50 crc kubenswrapper[4849]: I1203 12:21:50.932690 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:50Z","lastTransitionTime":"2025-12-03T12:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.034680 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.034713 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.034721 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.034734 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.034743 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:51Z","lastTransitionTime":"2025-12-03T12:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.136591 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.136676 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.136687 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.136701 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.136709 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:51Z","lastTransitionTime":"2025-12-03T12:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.238569 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.238609 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.238618 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.238630 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.238660 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:51Z","lastTransitionTime":"2025-12-03T12:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.340752 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.340805 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.340814 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.340826 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.340834 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:51Z","lastTransitionTime":"2025-12-03T12:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.442516 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.442551 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.442559 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.442570 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.442578 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:51Z","lastTransitionTime":"2025-12-03T12:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.544419 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.544452 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.544460 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.544471 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.544480 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:51Z","lastTransitionTime":"2025-12-03T12:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.646627 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.646682 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.646693 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.646704 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.646714 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:51Z","lastTransitionTime":"2025-12-03T12:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.748364 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.748400 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.748411 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.748423 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.748432 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:51Z","lastTransitionTime":"2025-12-03T12:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.850232 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.850263 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.850272 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.850283 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.850291 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:51Z","lastTransitionTime":"2025-12-03T12:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.856187 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.856203 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:51 crc kubenswrapper[4849]: E1203 12:21:51.856267 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.856190 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:51 crc kubenswrapper[4849]: E1203 12:21:51.856377 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:51 crc kubenswrapper[4849]: E1203 12:21:51.856425 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.952093 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.952131 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.952143 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.952156 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:51 crc kubenswrapper[4849]: I1203 12:21:51.952165 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:51Z","lastTransitionTime":"2025-12-03T12:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.053497 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.053558 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.053569 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.053582 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.053591 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.155427 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.155455 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.155464 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.155474 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.155481 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.257175 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.257215 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.257225 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.257235 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.257241 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.359039 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.359089 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.359097 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.359108 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.359115 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.432674 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.432698 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.432707 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.432715 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.432724 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: E1203 12:21:52.444417 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.446848 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.446868 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.446877 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.446887 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.446893 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: E1203 12:21:52.454842 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.456928 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.456969 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.456978 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.456987 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.456993 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: E1203 12:21:52.465408 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.467783 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.467807 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.467814 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.467825 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.467833 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: E1203 12:21:52.475664 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.477706 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.477749 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.477760 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.477770 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.477778 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: E1203 12:21:52.485476 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:52Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:52 crc kubenswrapper[4849]: E1203 12:21:52.485576 4849 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.486437 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.486468 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.486476 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.486489 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.486498 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.588007 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.588042 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.588050 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.588076 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.588085 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.689962 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.689992 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.690001 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.690013 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.690021 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.791541 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.791564 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.791572 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.791582 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.791589 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.856500 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:52 crc kubenswrapper[4849]: E1203 12:21:52.857672 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.858081 4849 scope.go:117] "RemoveContainer" containerID="ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22" Dec 03 12:21:52 crc kubenswrapper[4849]: E1203 12:21:52.858193 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.893568 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.893611 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.893622 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.893635 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.893658 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.995728 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.995755 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.995763 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.995774 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:52 crc kubenswrapper[4849]: I1203 12:21:52.995782 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:52Z","lastTransitionTime":"2025-12-03T12:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.097867 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.097905 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.097915 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.097930 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.097938 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:53Z","lastTransitionTime":"2025-12-03T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.199531 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.199563 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.199572 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.199585 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.199593 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:53Z","lastTransitionTime":"2025-12-03T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.301611 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.301664 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.301676 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.301689 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.301697 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:53Z","lastTransitionTime":"2025-12-03T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.403748 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.403777 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.403786 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.403797 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.403833 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:53Z","lastTransitionTime":"2025-12-03T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.505779 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.505820 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.505829 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.505841 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.505850 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:53Z","lastTransitionTime":"2025-12-03T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.607576 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.607605 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.607614 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.607626 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.607634 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:53Z","lastTransitionTime":"2025-12-03T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.709678 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.709727 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.709736 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.709749 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.709758 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:53Z","lastTransitionTime":"2025-12-03T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.811724 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.811746 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.811754 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.811768 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.811775 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:53Z","lastTransitionTime":"2025-12-03T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.855572 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.855659 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:53 crc kubenswrapper[4849]: E1203 12:21:53.855692 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.855585 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:53 crc kubenswrapper[4849]: E1203 12:21:53.855747 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:53 crc kubenswrapper[4849]: E1203 12:21:53.855802 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.865551 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.873583 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 
12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.882691 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.890352 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61deabad-c78e-48a9-85b4-427f288f987e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa19bc0deed4ef04b5eedd9f2c52b31915bac287da48ccbb3a723b1eae85b0ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78b61a64dfc9b6cb90be1936a3d95ef3dff7f347c848ce4403303b5765592e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e967e4cd7a8c3dde7ea58e96f356524c73163c942d7c067e0e37b9c20bbc85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.902829 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.912912 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.913273 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.913302 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:53 crc 
kubenswrapper[4849]: I1203 12:21:53.913311 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.913323 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.913331 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:53Z","lastTransitionTime":"2025-12-03T12:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.921250 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.928748 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.942352 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.950193 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.957521 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.969438 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:38Z\\\",\\\"message\\\":\\\"ed to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:38.438458 6502 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 12:21:38.438082 6502 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1203 12:21:38.438471 6502 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1203 12:21:38.438476 6502 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1203 12:21:38.438340 6502 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.976079 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.983944 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.991968 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:53 crc kubenswrapper[4849]: I1203 12:21:53.999872 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:53Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.006912 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:54Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.013624 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:54Z is after 2025-08-24T17:21:41Z" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.014661 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.014684 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.014693 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.014706 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.014714 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:54Z","lastTransitionTime":"2025-12-03T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.116538 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.116573 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.116583 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.116594 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.116606 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:54Z","lastTransitionTime":"2025-12-03T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.218631 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.218683 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.218694 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.218708 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.218718 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:54Z","lastTransitionTime":"2025-12-03T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.320248 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.320278 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.320286 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.320297 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.320306 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:54Z","lastTransitionTime":"2025-12-03T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.422119 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.422153 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.422161 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.422174 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.422188 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:54Z","lastTransitionTime":"2025-12-03T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.523629 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.523677 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.523686 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.523696 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.523704 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:54Z","lastTransitionTime":"2025-12-03T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.625096 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.625145 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.625169 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.625181 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.625190 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:54Z","lastTransitionTime":"2025-12-03T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.727176 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.727211 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.727221 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.727233 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.727241 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:54Z","lastTransitionTime":"2025-12-03T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.828406 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.828439 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.828449 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.828461 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.828469 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:54Z","lastTransitionTime":"2025-12-03T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.856317 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:54 crc kubenswrapper[4849]: E1203 12:21:54.856417 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.930711 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.930742 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.930750 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.930761 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:54 crc kubenswrapper[4849]: I1203 12:21:54.930788 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:54Z","lastTransitionTime":"2025-12-03T12:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.033079 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.033123 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.033134 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.033144 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.033152 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:55Z","lastTransitionTime":"2025-12-03T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.135363 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.135399 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.135407 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.135420 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.135429 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:55Z","lastTransitionTime":"2025-12-03T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.237587 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.237631 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.237665 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.237681 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.237692 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:55Z","lastTransitionTime":"2025-12-03T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.339726 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.339749 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.339758 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.339769 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.339776 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:55Z","lastTransitionTime":"2025-12-03T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.442098 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.442185 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.442201 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.442217 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.442226 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:55Z","lastTransitionTime":"2025-12-03T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.543687 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.543738 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.543748 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.543757 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.543764 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:55Z","lastTransitionTime":"2025-12-03T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.645598 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.645658 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.645672 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.645688 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.645699 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:55Z","lastTransitionTime":"2025-12-03T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.747403 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.747435 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.747445 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.747458 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.747466 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:55Z","lastTransitionTime":"2025-12-03T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.849511 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.849551 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.849562 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.849576 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.849585 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:55Z","lastTransitionTime":"2025-12-03T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.855803 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:55 crc kubenswrapper[4849]: E1203 12:21:55.855878 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.855995 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:55 crc kubenswrapper[4849]: E1203 12:21:55.856047 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.856195 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:55 crc kubenswrapper[4849]: E1203 12:21:55.856264 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.950604 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.950625 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.950633 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.950662 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:55 crc kubenswrapper[4849]: I1203 12:21:55.950670 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:55Z","lastTransitionTime":"2025-12-03T12:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.052445 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.052486 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.052494 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.052504 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.052511 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:56Z","lastTransitionTime":"2025-12-03T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.154300 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.154327 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.154336 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.154365 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.154374 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:56Z","lastTransitionTime":"2025-12-03T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.256435 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.256460 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.256469 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.256478 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.256501 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:56Z","lastTransitionTime":"2025-12-03T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.358099 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.358137 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.358148 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.358162 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.358174 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:56Z","lastTransitionTime":"2025-12-03T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.459855 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.459883 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.459894 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.459905 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.459916 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:56Z","lastTransitionTime":"2025-12-03T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.561244 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.561295 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.561308 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.561319 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.561327 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:56Z","lastTransitionTime":"2025-12-03T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.662899 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.662931 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.662940 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.662952 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.662962 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:56Z","lastTransitionTime":"2025-12-03T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.764544 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.764571 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.764580 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.764589 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.764597 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:56Z","lastTransitionTime":"2025-12-03T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.855784 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:56 crc kubenswrapper[4849]: E1203 12:21:56.855902 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.871515 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.871629 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.871638 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.871662 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.871670 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:56Z","lastTransitionTime":"2025-12-03T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.973373 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.973399 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.973407 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.973417 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:56 crc kubenswrapper[4849]: I1203 12:21:56.973425 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:56Z","lastTransitionTime":"2025-12-03T12:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.076032 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.076092 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.076103 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.076116 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.076144 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:57Z","lastTransitionTime":"2025-12-03T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.178089 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.178121 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.178132 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.178145 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.178155 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:57Z","lastTransitionTime":"2025-12-03T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.279818 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.279845 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.279853 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.279864 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.279871 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:57Z","lastTransitionTime":"2025-12-03T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.381304 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.381334 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.381342 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.381353 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.381362 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:57Z","lastTransitionTime":"2025-12-03T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.409542 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs\") pod \"network-metrics-daemon-hjzzk\" (UID: \"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:57 crc kubenswrapper[4849]: E1203 12:21:57.409666 4849 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:57 crc kubenswrapper[4849]: E1203 12:21:57.409740 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs podName:1cebc8f9-e598-45ce-aed1-4fbd7df7fb86 nodeName:}" failed. No retries permitted until 2025-12-03 12:22:29.409724099 +0000 UTC m=+95.871571882 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs") pod "network-metrics-daemon-hjzzk" (UID: "1cebc8f9-e598-45ce-aed1-4fbd7df7fb86") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.483256 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.483285 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.483305 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.483316 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.483323 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:57Z","lastTransitionTime":"2025-12-03T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.584981 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.585010 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.585018 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.585031 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.585039 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:57Z","lastTransitionTime":"2025-12-03T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.686931 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.686959 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.686967 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.686978 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.686986 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:57Z","lastTransitionTime":"2025-12-03T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.789240 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.789268 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.789278 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.789289 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.789298 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:57Z","lastTransitionTime":"2025-12-03T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.855943 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:57 crc kubenswrapper[4849]: E1203 12:21:57.856051 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.855946 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.855955 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:57 crc kubenswrapper[4849]: E1203 12:21:57.856228 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:57 crc kubenswrapper[4849]: E1203 12:21:57.856163 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.890635 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.890678 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.890686 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.890698 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.890707 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:57Z","lastTransitionTime":"2025-12-03T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.992572 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.992597 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.992605 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.992615 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:57 crc kubenswrapper[4849]: I1203 12:21:57.992622 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:57Z","lastTransitionTime":"2025-12-03T12:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.093763 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.093795 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.093803 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.093815 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.093822 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:58Z","lastTransitionTime":"2025-12-03T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.195418 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.195448 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.195456 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.195468 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.195476 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:58Z","lastTransitionTime":"2025-12-03T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.296580 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.296611 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.296620 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.296631 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.296658 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:58Z","lastTransitionTime":"2025-12-03T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.398511 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.398546 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.398556 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.398568 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.398577 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:58Z","lastTransitionTime":"2025-12-03T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.499980 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.500014 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.500022 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.500034 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.500043 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:58Z","lastTransitionTime":"2025-12-03T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.601780 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.601840 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.601851 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.601863 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.601871 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:58Z","lastTransitionTime":"2025-12-03T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.703814 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.703846 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.703854 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.703865 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.703875 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:58Z","lastTransitionTime":"2025-12-03T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.805563 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.805613 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.805622 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.805634 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.805660 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:58Z","lastTransitionTime":"2025-12-03T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.855865 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:21:58 crc kubenswrapper[4849]: E1203 12:21:58.855961 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.906820 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.906848 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.906877 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.906890 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:58 crc kubenswrapper[4849]: I1203 12:21:58.906898 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:58Z","lastTransitionTime":"2025-12-03T12:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.008430 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.008477 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.008486 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.008497 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.008505 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:59Z","lastTransitionTime":"2025-12-03T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.110038 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.110110 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.110121 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.110138 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.110146 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:59Z","lastTransitionTime":"2025-12-03T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.211725 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.211759 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.211767 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.211780 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.211789 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:59Z","lastTransitionTime":"2025-12-03T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.313728 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.313767 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.313776 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.313788 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.313796 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:59Z","lastTransitionTime":"2025-12-03T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.416041 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.416085 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.416094 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.416107 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.416117 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:59Z","lastTransitionTime":"2025-12-03T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.517488 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.517521 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.517530 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.517543 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.517552 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:59Z","lastTransitionTime":"2025-12-03T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.619543 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.619569 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.619577 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.619588 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.619596 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:59Z","lastTransitionTime":"2025-12-03T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.721085 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.721119 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.721128 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.721140 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.721149 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:59Z","lastTransitionTime":"2025-12-03T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.823317 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.823353 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.823363 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.823375 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.823384 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:59Z","lastTransitionTime":"2025-12-03T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.855887 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.855909 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.855925 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:21:59 crc kubenswrapper[4849]: E1203 12:21:59.855993 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:21:59 crc kubenswrapper[4849]: E1203 12:21:59.856049 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:21:59 crc kubenswrapper[4849]: E1203 12:21:59.856138 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.925468 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.925543 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.925554 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.925566 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:21:59 crc kubenswrapper[4849]: I1203 12:21:59.925576 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:21:59Z","lastTransitionTime":"2025-12-03T12:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.027531 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.027563 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.027571 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.027585 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.027593 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:00Z","lastTransitionTime":"2025-12-03T12:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.074969 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pjsx_1b60c35d-f388-49eb-a5d8-09a6cc752575/kube-multus/0.log" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.075008 4849 generic.go:334] "Generic (PLEG): container finished" podID="1b60c35d-f388-49eb-a5d8-09a6cc752575" containerID="336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9" exitCode=1 Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.075033 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pjsx" event={"ID":"1b60c35d-f388-49eb-a5d8-09a6cc752575","Type":"ContainerDied","Data":"336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9"} Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.075304 4849 scope.go:117] "RemoveContainer" containerID="336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.088580 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61deabad-c78e-48a9-85b4-427f288f987e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa19bc0deed4ef04b5eedd9f2c52b31915bac287da48ccbb3a723b1eae85b0ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78b61a64dfc9b6cb90be1936a3d95ef3dff7f347c848ce4403303b5765592e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e967e4cd7a8c3dde7ea58e96f356524c73163c942d7c067e0e37b9c20bbc85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.102977 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c8658902
8e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.112471 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2
fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 
2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.123153 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.129381 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.129404 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.129414 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.129426 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.129435 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:00Z","lastTransitionTime":"2025-12-03T12:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.131497 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.139467 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.147315 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:59Z\\\",\\\"message\\\":\\\"2025-12-03T12:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe\\\\n2025-12-03T12:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe to /host/opt/cni/bin/\\\\n2025-12-03T12:21:14Z [verbose] multus-daemon started\\\\n2025-12-03T12:21:14Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:21:59Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.157230 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9
e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.165618 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.173120 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.180807 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.187798 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.194080 4849 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.205811 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:38Z\\\",\\\"message\\\":\\\"ed to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:38.438458 6502 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 12:21:38.438082 6502 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1203 12:21:38.438471 6502 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1203 12:21:38.438476 6502 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1203 12:21:38.438340 6502 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.212190 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.218985 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.225312 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.230941 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.230965 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.230973 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.230985 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.230995 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:00Z","lastTransitionTime":"2025-12-03T12:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.234034 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:00Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.332926 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.332953 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.332961 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.332973 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.332980 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:00Z","lastTransitionTime":"2025-12-03T12:22:00Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.435245 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.435279 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.435287 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.435301 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.435310 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:00Z","lastTransitionTime":"2025-12-03T12:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.539828 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.540052 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.540126 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.540200 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.540262 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:00Z","lastTransitionTime":"2025-12-03T12:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.642710 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.642747 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.642757 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.642769 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.642777 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:00Z","lastTransitionTime":"2025-12-03T12:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.744627 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.744861 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.744957 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.745039 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.745129 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:00Z","lastTransitionTime":"2025-12-03T12:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.846845 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.846878 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.846887 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.846899 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.846908 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:00Z","lastTransitionTime":"2025-12-03T12:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.856215 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:00 crc kubenswrapper[4849]: E1203 12:22:00.856369 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.948407 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.948433 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.948440 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.948451 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:00 crc kubenswrapper[4849]: I1203 12:22:00.948459 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:00Z","lastTransitionTime":"2025-12-03T12:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.050427 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.050462 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.050470 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.050481 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.050490 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:01Z","lastTransitionTime":"2025-12-03T12:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.078791 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pjsx_1b60c35d-f388-49eb-a5d8-09a6cc752575/kube-multus/0.log" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.078840 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pjsx" event={"ID":"1b60c35d-f388-49eb-a5d8-09a6cc752575","Type":"ContainerStarted","Data":"c366bcc45a52929441f0992463074c30100244da02eeb2b1f5e22150b91be24d"} Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.087461 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\
\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.095451 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283
046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.103189 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61deabad-c78e-48a9-85b4-427f288f987e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa19bc0deed4ef04b5eedd9f2c52b31915bac287da48ccbb3a723b1eae85b0ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78b61a64dfc9b6cb90be1936a3d95ef3dff7f347c848ce4403303b5765592e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e967e4cd7a8c3dde7ea58e96f356524c73163c942d7c067e0e37b9c20bbc85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.115751 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.124460 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.132658 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.141464 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.151918 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c366bcc45a52929441f0992463074c30100244da02eeb2b1f5e22150b91be24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:59Z\\\",\\\"message\\\":\\\"2025-12-03T12:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe\\\\n2025-12-03T12:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe to /host/opt/cni/bin/\\\\n2025-12-03T12:21:14Z [verbose] multus-daemon started\\\\n2025-12-03T12:21:14Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:21:59Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:22:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.152093 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.152485 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.152501 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.152516 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.152524 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:01Z","lastTransitionTime":"2025-12-03T12:22:01Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.167360 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.174482 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.185941 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:38Z\\\",\\\"message\\\":\\\"ed to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:38.438458 6502 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 12:21:38.438082 6502 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1203 12:21:38.438471 6502 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1203 12:21:38.438476 6502 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1203 12:21:38.438340 6502 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.192135 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.199532 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.207168 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.214371 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.222893 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.230972 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.237421 4849 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:01Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.253928 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.253964 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 
12:22:01.253972 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.253984 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.253992 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:01Z","lastTransitionTime":"2025-12-03T12:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.355898 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.355952 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.355961 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.355974 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.355985 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:01Z","lastTransitionTime":"2025-12-03T12:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.457581 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.457628 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.457636 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.457665 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.457673 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:01Z","lastTransitionTime":"2025-12-03T12:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.559146 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.559173 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.559182 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.559192 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.559200 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:01Z","lastTransitionTime":"2025-12-03T12:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.661140 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.661173 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.661181 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.661194 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.661201 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:01Z","lastTransitionTime":"2025-12-03T12:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.763124 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.763164 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.763173 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.763185 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.763193 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:01Z","lastTransitionTime":"2025-12-03T12:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.856350 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.856367 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:01 crc kubenswrapper[4849]: E1203 12:22:01.856439 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.856463 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:01 crc kubenswrapper[4849]: E1203 12:22:01.856565 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:01 crc kubenswrapper[4849]: E1203 12:22:01.856604 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.864947 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.864973 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.864981 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.864992 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.865000 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:01Z","lastTransitionTime":"2025-12-03T12:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.966691 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.966725 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.966733 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.966747 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:01 crc kubenswrapper[4849]: I1203 12:22:01.966757 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:01Z","lastTransitionTime":"2025-12-03T12:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.068699 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.068731 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.068739 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.068751 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.068759 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.170822 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.170857 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.170866 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.170878 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.170886 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.272996 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.273027 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.273037 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.273049 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.273059 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.375125 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.375143 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.375163 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.375174 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.375181 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.476802 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.476826 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.476834 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.476850 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.476859 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.578291 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.578317 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.578326 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.578337 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.578344 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.580514 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.580550 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.580560 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.580572 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.580580 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: E1203 12:22:02.588413 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.590795 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.590909 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.590972 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.591036 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.591115 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: E1203 12:22:02.599055 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.601328 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.601368 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.601380 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.601391 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.601399 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: E1203 12:22:02.610327 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.612844 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.612894 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.612903 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.612912 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.612919 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: E1203 12:22:02.621838 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.623809 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.623834 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.623843 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.623852 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.623860 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: E1203 12:22:02.631731 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:02Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:02 crc kubenswrapper[4849]: E1203 12:22:02.631841 4849 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.680426 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.680470 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.680480 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.680493 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.680503 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.782148 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.782191 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.782201 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.782213 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.782223 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.855743 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:02 crc kubenswrapper[4849]: E1203 12:22:02.855832 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.884010 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.884050 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.884061 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.884090 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.884103 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.985990 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.986048 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.986058 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.986071 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:02 crc kubenswrapper[4849]: I1203 12:22:02.986092 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:02Z","lastTransitionTime":"2025-12-03T12:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.087782 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.087821 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.087831 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.087844 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.087853 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:03Z","lastTransitionTime":"2025-12-03T12:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.189895 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.189923 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.189931 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.189943 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.189951 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:03Z","lastTransitionTime":"2025-12-03T12:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.291715 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.291749 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.291770 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.291782 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.291791 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:03Z","lastTransitionTime":"2025-12-03T12:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.393803 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.393856 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.393865 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.393879 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.393887 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:03Z","lastTransitionTime":"2025-12-03T12:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.495761 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.495797 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.495806 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.495820 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.495828 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:03Z","lastTransitionTime":"2025-12-03T12:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.597426 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.597475 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.597484 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.597496 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.597504 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:03Z","lastTransitionTime":"2025-12-03T12:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.699334 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.699372 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.699381 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.699394 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.699404 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:03Z","lastTransitionTime":"2025-12-03T12:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.800509 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.800543 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.800554 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.800567 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.800577 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:03Z","lastTransitionTime":"2025-12-03T12:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.856695 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.856736 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:03 crc kubenswrapper[4849]: E1203 12:22:03.856796 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:03 crc kubenswrapper[4849]: E1203 12:22:03.856886 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.856709 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:03 crc kubenswrapper[4849]: E1203 12:22:03.856964 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.857150 4849 scope.go:117] "RemoveContainer" containerID="ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.866072 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.873201 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.880020 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.887851 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.896221 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61deabad-c78e-48a9-85b4-427f288f987e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa19bc0deed4ef04b5eedd9f2c52b31915bac287da48ccbb3a723b1eae85b0ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78b61a64dfc9b6cb90be1936a3d95ef3dff7f347c848ce4403303b5765592e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e967e4cd7a8c3dde7ea58e96f356524c73163c942d7c067e0e37b9c20bbc85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.902756 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.902776 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.902785 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.902796 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 
12:22:03.902805 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:03Z","lastTransitionTime":"2025-12-03T12:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.909911 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f4
2928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.918962 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.926430 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 
12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.935069 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.943488 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.952480 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c366bcc45a52929441f0992463074c30100244da02eeb2b1f5e22150b91be24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:59Z\\\",\\\"message\\\":\\\"2025-12-03T12:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe\\\\n2025-12-03T12:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe to 
/host/opt/cni/bin/\\\\n2025-12-03T12:21:14Z [verbose] multus-daemon started\\\\n2025-12-03T12:21:14Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:21:59Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:22:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.965830 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.978942 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:38Z\\\",\\\"message\\\":\\\"ed to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:38.438458 6502 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 12:21:38.438082 6502 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1203 12:21:38.438471 6502 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1203 12:21:38.438476 6502 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1203 12:21:38.438340 6502 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.986075 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:03 crc kubenswrapper[4849]: I1203 12:22:03.994583 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:03Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.004705 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.004741 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.004751 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.004764 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.004772 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:04Z","lastTransitionTime":"2025-12-03T12:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.006577 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.014549 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.022137 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.086094 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/2.log" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.087927 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerStarted","Data":"bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3"} Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.088304 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.108985 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.109028 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 
12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.109039 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.109052 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.109064 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:04Z","lastTransitionTime":"2025-12-03T12:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.117783 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\
"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.135299 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61deabad-c78e-48a9-85b4-427f288f987e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa19bc0deed4ef04b5eedd9f2c52b31915bac287da48ccbb3a723b1eae85b0ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78b61a64dfc9b6cb90be1936a3d95ef3dff7f347c848ce4403303b5765592e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e967e4cd7a8c3dde7ea58e96f356524c73163c942d7c067e0e37b9c20bbc85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.148421 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.157582 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.166554 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 
12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.175976 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.183914 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.191914 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c366bcc45a52929441f0992463074c30100244da02eeb2b1f5e22150b91be24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:59Z\\\",\\\"message\\\":\\\"2025-12-03T12:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe\\\\n2025-12-03T12:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe to 
/host/opt/cni/bin/\\\\n2025-12-03T12:21:14Z [verbose] multus-daemon started\\\\n2025-12-03T12:21:14Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:21:59Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:22:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.201875 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.210706 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.210767 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:04 crc 
kubenswrapper[4849]: I1203 12:22:04.210777 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.210793 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.210800 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:04Z","lastTransitionTime":"2025-12-03T12:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.211111 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.221748 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.231258 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.239237 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.252908 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:38Z\\\",\\\"message\\\":\\\"ed to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:38.438458 6502 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 12:21:38.438082 6502 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1203 12:21:38.438471 6502 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1203 12:21:38.438476 6502 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1203 12:21:38.438340 6502 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: 
UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:22:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"c
ontainerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.261277 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.269744 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.277911 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.286405 4849 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.312967 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.312994 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 
12:22:04.313004 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.313017 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.313026 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:04Z","lastTransitionTime":"2025-12-03T12:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.414626 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.414677 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.414686 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.414699 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.414708 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:04Z","lastTransitionTime":"2025-12-03T12:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.516261 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.516309 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.516319 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.516329 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.516338 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:04Z","lastTransitionTime":"2025-12-03T12:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.618112 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.618149 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.618158 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.618172 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.618183 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:04Z","lastTransitionTime":"2025-12-03T12:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.720467 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.720509 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.720517 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.720530 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.720539 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:04Z","lastTransitionTime":"2025-12-03T12:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.822797 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.822825 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.822833 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.822843 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.822851 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:04Z","lastTransitionTime":"2025-12-03T12:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.856359 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:04 crc kubenswrapper[4849]: E1203 12:22:04.856469 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.925319 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.925356 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.925365 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.925379 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:04 crc kubenswrapper[4849]: I1203 12:22:04.925388 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:04Z","lastTransitionTime":"2025-12-03T12:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.026765 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.026797 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.026806 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.026819 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.026827 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:05Z","lastTransitionTime":"2025-12-03T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.091832 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/3.log" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.092274 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/2.log" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.094661 4849 generic.go:334] "Generic (PLEG): container finished" podID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerID="bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3" exitCode=1 Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.094727 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3"} Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.094841 4849 scope.go:117] "RemoveContainer" containerID="ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.095172 4849 scope.go:117] "RemoveContainer" containerID="bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3" Dec 03 12:22:05 crc kubenswrapper[4849]: E1203 12:22:05.095301 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.106473 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c366bcc45a52929441f0992463074c30100244da02eeb2b1f5e22150b91be24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:59Z\\\",\\\"message\\\":\\\"2025-12-03T12:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe\\\\n2025-12-03T12:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe to /host/opt/cni/bin/\\\\n2025-12-03T12:21:14Z [verbose] multus-daemon started\\\\n2025-12-03T12:21:14Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:21:59Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:22:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.119741 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.128589 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.128637 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:05 crc 
kubenswrapper[4849]: I1203 12:22:05.128676 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.128691 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.128700 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:05Z","lastTransitionTime":"2025-12-03T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.129423 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.137179 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.145109 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.152990 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.159583 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.171738 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed6a74d734f5d46b216994d9fabe727d2ac6b8807299cdefc1f63498a90a5a22\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:38Z\\\",\\\"message\\\":\\\"ed to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:21:38Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:21:38.438458 6502 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1203 12:21:38.438082 6502 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1203 12:21:38.438471 6502 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1203 12:21:38.438476 6502 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI1203 12:21:38.438340 6502 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUID\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:22:04Z\\\",\\\"message\\\":\\\"because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:22:04.463956 6885 services_controller.go:434] Service openshift-oauth-apiserver/api retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{api openshift-oauth-apiserver 2f1398e4-d2ff-487d-9418-90611e2e40ed 4823 0 2025-02-23 05:22:59 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:openshift-oauth-apiserver] 
map[operator.openshift.io/spec-hash:9c74227d7f96d723d980c50373a5e91f08c5893365bfd5a5040449b1b6585a23 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{ap\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:22:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-
log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.179104 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.187406 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.194211 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.201982 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.209588 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.224949 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"n
ame\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.231159 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.231220 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.231232 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.231249 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.231259 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:05Z","lastTransitionTime":"2025-12-03T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.235022 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcon
t/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.242666 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 
12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.250697 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.258035 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61deabad-c78e-48a9-85b4-427f288f987e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa19bc0deed4ef04b5eedd9f2c52b31915bac287da48ccbb3a723b1eae85b0ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78b61a64dfc9b6cb90be1936a3d95ef3dff7f347c848ce4403303b5765592e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e967e4cd7a8c3dde7ea58e96f356524c73163c942d7c067e0e37b9c20bbc85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:05Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.334379 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.334726 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.334789 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.334946 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.334960 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:05Z","lastTransitionTime":"2025-12-03T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.437578 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.437618 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.437626 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.437651 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.437661 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:05Z","lastTransitionTime":"2025-12-03T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.539702 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.539857 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.539942 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.540020 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.540074 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:05Z","lastTransitionTime":"2025-12-03T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.642570 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.642605 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.642613 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.642626 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.642634 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:05Z","lastTransitionTime":"2025-12-03T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.745051 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.745295 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.745359 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.745418 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.745470 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:05Z","lastTransitionTime":"2025-12-03T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.847192 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.847225 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.847232 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.847245 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.847253 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:05Z","lastTransitionTime":"2025-12-03T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.855572 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.855578 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:05 crc kubenswrapper[4849]: E1203 12:22:05.855697 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:05 crc kubenswrapper[4849]: E1203 12:22:05.855759 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.855588 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:05 crc kubenswrapper[4849]: E1203 12:22:05.855812 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.949112 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.949140 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.949148 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.949159 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:05 crc kubenswrapper[4849]: I1203 12:22:05.949167 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:05Z","lastTransitionTime":"2025-12-03T12:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.051422 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.051464 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.051472 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.051483 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.051491 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:06Z","lastTransitionTime":"2025-12-03T12:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.097804 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/3.log" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.100410 4849 scope.go:117] "RemoveContainer" containerID="bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3" Dec 03 12:22:06 crc kubenswrapper[4849]: E1203 12:22:06.100543 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.108933 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.116109 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.127550 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:22:04Z\\\",\\\"message\\\":\\\"because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:22:04.463956 6885 services_controller.go:434] Service openshift-oauth-apiserver/api retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{api openshift-oauth-apiserver 2f1398e4-d2ff-487d-9418-90611e2e40ed 4823 0 2025-02-23 05:22:59 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:openshift-oauth-apiserver] map[operator.openshift.io/spec-hash:9c74227d7f96d723d980c50373a5e91f08c5893365bfd5a5040449b1b6585a23 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{ap\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:22:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.134017 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.141635 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.149265 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.153796 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.153828 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.153837 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.153851 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.153860 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:06Z","lastTransitionTime":"2025-12-03T12:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.156846 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.164168 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.171562 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.180988 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.188278 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 
12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.196048 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.204076 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61deabad-c78e-48a9-85b4-427f288f987e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa19bc0deed4ef04b5eedd9f2c52b31915bac287da48ccbb3a723b1eae85b0ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78b61a64dfc9b6cb90be1936a3d95ef3dff7f347c848ce4403303b5765592e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e967e4cd7a8c3dde7ea58e96f356524c73163c942d7c067e0e37b9c20bbc85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.217383 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.228438 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.237765 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.245445 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.253346 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c366bcc45a52929441f0992463074c30100244da02eeb2b1f5e22150b91be24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:59Z\\\",\\\"message\\\":\\\"2025-12-03T12:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe\\\\n2025-12-03T12:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe to /host/opt/cni/bin/\\\\n2025-12-03T12:21:14Z [verbose] multus-daemon started\\\\n2025-12-03T12:21:14Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:21:59Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:22:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:06Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.255621 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.255661 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.255672 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.255684 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.255691 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:06Z","lastTransitionTime":"2025-12-03T12:22:06Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.357534 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.357563 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.357572 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.357585 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.357594 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:06Z","lastTransitionTime":"2025-12-03T12:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.459229 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.459280 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.459289 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.459305 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.459314 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:06Z","lastTransitionTime":"2025-12-03T12:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.561981 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.562028 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.562037 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.562051 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.562062 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:06Z","lastTransitionTime":"2025-12-03T12:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.664376 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.664596 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.664683 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.664759 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.664817 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:06Z","lastTransitionTime":"2025-12-03T12:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.766688 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.766724 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.766732 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.766745 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.766754 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:06Z","lastTransitionTime":"2025-12-03T12:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.855894 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:06 crc kubenswrapper[4849]: E1203 12:22:06.856027 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.869145 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.869196 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.869207 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.869220 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.869229 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:06Z","lastTransitionTime":"2025-12-03T12:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.970785 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.970826 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.970853 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.970869 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:06 crc kubenswrapper[4849]: I1203 12:22:06.970878 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:06Z","lastTransitionTime":"2025-12-03T12:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.072735 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.072763 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.072772 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.072784 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.072792 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:07Z","lastTransitionTime":"2025-12-03T12:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.174958 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.174992 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.175000 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.175040 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.175050 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:07Z","lastTransitionTime":"2025-12-03T12:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.276304 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.276337 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.276347 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.276361 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.276370 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:07Z","lastTransitionTime":"2025-12-03T12:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.378473 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.378547 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.378558 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.378583 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.378595 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:07Z","lastTransitionTime":"2025-12-03T12:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.480726 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.480764 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.480775 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.480789 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.480800 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:07Z","lastTransitionTime":"2025-12-03T12:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.582943 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.582976 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.582990 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.583003 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.583012 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:07Z","lastTransitionTime":"2025-12-03T12:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.685026 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.685096 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.685109 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.685138 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.685154 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:07Z","lastTransitionTime":"2025-12-03T12:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.787410 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.787580 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.787639 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.787726 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.787781 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:07Z","lastTransitionTime":"2025-12-03T12:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.856390 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.856451 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:07 crc kubenswrapper[4849]: E1203 12:22:07.856492 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.856539 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:07 crc kubenswrapper[4849]: E1203 12:22:07.856710 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:07 crc kubenswrapper[4849]: E1203 12:22:07.856767 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.890139 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.890160 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.890169 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.890179 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.890189 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:07Z","lastTransitionTime":"2025-12-03T12:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.992418 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.992878 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.992943 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.993008 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:07 crc kubenswrapper[4849]: I1203 12:22:07.993058 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:07Z","lastTransitionTime":"2025-12-03T12:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.096224 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.096301 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.096314 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.096337 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.096349 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:08Z","lastTransitionTime":"2025-12-03T12:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.198523 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.198581 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.198591 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.198610 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.198619 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:08Z","lastTransitionTime":"2025-12-03T12:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.300709 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.300814 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.300916 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.300987 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.301062 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:08Z","lastTransitionTime":"2025-12-03T12:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.402869 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.402914 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.402924 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.402940 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.402949 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:08Z","lastTransitionTime":"2025-12-03T12:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.504280 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.504318 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.504326 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.504340 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.504351 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:08Z","lastTransitionTime":"2025-12-03T12:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.606126 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.606167 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.606194 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.606209 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.606219 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:08Z","lastTransitionTime":"2025-12-03T12:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.707966 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.708002 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.708011 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.708023 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.708031 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:08Z","lastTransitionTime":"2025-12-03T12:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.809897 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.810120 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.810206 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.810279 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.810341 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:08Z","lastTransitionTime":"2025-12-03T12:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.856363 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:08 crc kubenswrapper[4849]: E1203 12:22:08.856553 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.912078 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.912125 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.912134 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.912148 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:08 crc kubenswrapper[4849]: I1203 12:22:08.912158 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:08Z","lastTransitionTime":"2025-12-03T12:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.013330 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.013359 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.013367 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.013378 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.013387 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:09Z","lastTransitionTime":"2025-12-03T12:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.114847 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.114887 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.114895 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.114908 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.114917 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:09Z","lastTransitionTime":"2025-12-03T12:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.216918 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.216952 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.216960 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.216973 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.216981 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:09Z","lastTransitionTime":"2025-12-03T12:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.318821 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.318852 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.318861 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.318873 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.318881 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:09Z","lastTransitionTime":"2025-12-03T12:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.421006 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.421038 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.421045 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.421073 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.421089 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:09Z","lastTransitionTime":"2025-12-03T12:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.522750 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.522774 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.522811 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.522823 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.522830 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:09Z","lastTransitionTime":"2025-12-03T12:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.624571 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.624608 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.624616 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.624629 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.624656 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:09Z","lastTransitionTime":"2025-12-03T12:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.726549 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.726585 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.726594 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.726608 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.726616 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:09Z","lastTransitionTime":"2025-12-03T12:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.828667 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.828712 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.828721 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.828732 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.828740 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:09Z","lastTransitionTime":"2025-12-03T12:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.856208 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:09 crc kubenswrapper[4849]: E1203 12:22:09.856299 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.856227 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:09 crc kubenswrapper[4849]: E1203 12:22:09.856360 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.856219 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:09 crc kubenswrapper[4849]: E1203 12:22:09.856406 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.930604 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.930662 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.930674 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.930686 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:09 crc kubenswrapper[4849]: I1203 12:22:09.930693 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:09Z","lastTransitionTime":"2025-12-03T12:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.033029 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.033069 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.033078 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.033102 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.033111 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:10Z","lastTransitionTime":"2025-12-03T12:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.135448 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.135480 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.135488 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.135500 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.135509 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:10Z","lastTransitionTime":"2025-12-03T12:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.237280 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.237319 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.237327 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.237339 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.237347 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:10Z","lastTransitionTime":"2025-12-03T12:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.339542 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.339574 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.339582 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.339596 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.339604 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:10Z","lastTransitionTime":"2025-12-03T12:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.441715 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.441747 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.441755 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.441769 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.441777 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:10Z","lastTransitionTime":"2025-12-03T12:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.543466 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.543491 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.543500 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.543511 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.543519 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:10Z","lastTransitionTime":"2025-12-03T12:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.645263 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.645304 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.645313 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.645325 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.645332 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:10Z","lastTransitionTime":"2025-12-03T12:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.747372 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.747402 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.747409 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.747420 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.747428 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:10Z","lastTransitionTime":"2025-12-03T12:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.849138 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.849169 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.849179 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.849193 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.849203 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:10Z","lastTransitionTime":"2025-12-03T12:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.856531 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:10 crc kubenswrapper[4849]: E1203 12:22:10.856635 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.950742 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.950777 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.950785 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.950797 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:10 crc kubenswrapper[4849]: I1203 12:22:10.950808 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:10Z","lastTransitionTime":"2025-12-03T12:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.052783 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.052818 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.052826 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.052839 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.052848 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:11Z","lastTransitionTime":"2025-12-03T12:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.154113 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.154144 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.154153 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.154165 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.154173 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:11Z","lastTransitionTime":"2025-12-03T12:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.256281 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.256317 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.256326 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.256338 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.256346 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:11Z","lastTransitionTime":"2025-12-03T12:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.358433 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.358466 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.358475 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.358488 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.358496 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:11Z","lastTransitionTime":"2025-12-03T12:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.459800 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.459838 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.459846 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.459856 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.459864 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:11Z","lastTransitionTime":"2025-12-03T12:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.561203 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.561238 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.561246 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.561257 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.561266 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:11Z","lastTransitionTime":"2025-12-03T12:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.662968 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.663000 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.663008 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.663019 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.663028 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:11Z","lastTransitionTime":"2025-12-03T12:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.764901 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.764925 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.764941 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.764953 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.764961 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:11Z","lastTransitionTime":"2025-12-03T12:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.856483 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.856515 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:11 crc kubenswrapper[4849]: E1203 12:22:11.856569 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.856592 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:11 crc kubenswrapper[4849]: E1203 12:22:11.856657 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:11 crc kubenswrapper[4849]: E1203 12:22:11.856708 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.865688 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.866181 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.866218 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.866228 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.866241 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.866249 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:11Z","lastTransitionTime":"2025-12-03T12:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.968374 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.968395 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.968403 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.968413 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:11 crc kubenswrapper[4849]: I1203 12:22:11.968421 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:11Z","lastTransitionTime":"2025-12-03T12:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.070223 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.070252 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.070260 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.070271 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.070279 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:12Z","lastTransitionTime":"2025-12-03T12:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.172054 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.172098 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.172107 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.172118 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.172125 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:12Z","lastTransitionTime":"2025-12-03T12:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.273937 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.273971 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.273981 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.273995 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.274003 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:12Z","lastTransitionTime":"2025-12-03T12:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.375959 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.375992 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.376000 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.376011 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.376019 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:12Z","lastTransitionTime":"2025-12-03T12:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.477823 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.477849 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.477857 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.477868 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.477875 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:12Z","lastTransitionTime":"2025-12-03T12:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.579872 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.579903 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.579913 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.579924 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.579932 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:12Z","lastTransitionTime":"2025-12-03T12:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.681147 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.681172 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.681180 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.681191 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.681200 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:12Z","lastTransitionTime":"2025-12-03T12:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.783060 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.783095 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.783103 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.783114 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.783124 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:12Z","lastTransitionTime":"2025-12-03T12:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.856175 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:12 crc kubenswrapper[4849]: E1203 12:22:12.856270 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.884435 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.884471 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.884480 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.884492 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.884500 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:12Z","lastTransitionTime":"2025-12-03T12:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.963240 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.963276 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.963285 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.963297 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.963306 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:12Z","lastTransitionTime":"2025-12-03T12:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:12 crc kubenswrapper[4849]: E1203 12:22:12.971845 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.974598 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.974622 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.974630 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.974656 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.974663 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:12Z","lastTransitionTime":"2025-12-03T12:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:12 crc kubenswrapper[4849]: E1203 12:22:12.982286 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.984415 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.984437 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.984445 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.984454 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.984461 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:12Z","lastTransitionTime":"2025-12-03T12:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:12 crc kubenswrapper[4849]: E1203 12:22:12.991847 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:12Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.993936 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.993969 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.993979 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.993991 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:12 crc kubenswrapper[4849]: I1203 12:22:12.994001 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:12Z","lastTransitionTime":"2025-12-03T12:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:13 crc kubenswrapper[4849]: E1203 12:22:13.001528 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.004205 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.004231 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.004239 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.004249 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.004256 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:13Z","lastTransitionTime":"2025-12-03T12:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:13 crc kubenswrapper[4849]: E1203 12:22:13.012096 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: E1203 12:22:13.012194 4849 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.013155 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.013181 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.013190 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.013200 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.013207 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:13Z","lastTransitionTime":"2025-12-03T12:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.114323 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.114350 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.114358 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.114382 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.114391 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:13Z","lastTransitionTime":"2025-12-03T12:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.216408 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.216439 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.216464 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.216477 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.216486 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:13Z","lastTransitionTime":"2025-12-03T12:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.318303 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.318342 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.318350 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.318361 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.318368 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:13Z","lastTransitionTime":"2025-12-03T12:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.420354 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.420412 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.420423 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.420435 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.420445 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:13Z","lastTransitionTime":"2025-12-03T12:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.522062 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.522109 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.522121 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.522134 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.522144 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:13Z","lastTransitionTime":"2025-12-03T12:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.624241 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.624269 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.624302 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.624315 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.624323 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:13Z","lastTransitionTime":"2025-12-03T12:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.725937 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.725974 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.725983 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.725995 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.726005 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:13Z","lastTransitionTime":"2025-12-03T12:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.827860 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.827888 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.827897 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.827910 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.827919 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:13Z","lastTransitionTime":"2025-12-03T12:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.856454 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.856454 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:13 crc kubenswrapper[4849]: E1203 12:22:13.856568 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.856615 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:13 crc kubenswrapper[4849]: E1203 12:22:13.856622 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:13 crc kubenswrapper[4849]: E1203 12:22:13.856743 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.864533 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.872061 4849 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.879462 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.892163 4849 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.900750 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.907935 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 
12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.917231 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.925156 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61deabad-c78e-48a9-85b4-427f288f987e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa19bc0deed4ef04b5eedd9f2c52b31915bac287da48ccbb3a723b1eae85b0ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78b61a64dfc9b6cb90be1936a3d95ef3dff7f347c848ce4403303b5765592e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e967e4cd7a8c3dde7ea58e96f356524c73163c942d7c067e0e37b9c20bbc85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.929041 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.929066 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.929075 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.929100 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.929109 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:13Z","lastTransitionTime":"2025-12-03T12:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.944714 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c366bcc45a52929441f0992463074c30100244da02eeb2b1f5e22150b91be24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:59Z\\\",\\\"message\\\":\\\"2025-12-03T12:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe\\\\n2025-12-03T12:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe to /host/opt/cni/bin/\\\\n2025-12-03T12:21:14Z [verbose] multus-daemon started\\\\n2025-12-03T12:21:14Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:21:59Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:22:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.958403 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.968435 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.976900 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.985397 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:13 crc kubenswrapper[4849]: I1203 12:22:13.993586 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.000579 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:13Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.012476 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:22:04Z\\\",\\\"message\\\":\\\"because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:22:04.463956 6885 services_controller.go:434] Service openshift-oauth-apiserver/api retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{api openshift-oauth-apiserver 2f1398e4-d2ff-487d-9418-90611e2e40ed 4823 0 2025-02-23 05:22:59 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:openshift-oauth-apiserver] map[operator.openshift.io/spec-hash:9c74227d7f96d723d980c50373a5e91f08c5893365bfd5a5040449b1b6585a23 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{ap\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:22:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.019399 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.025952 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8844952d-6213-4001-b613-84f02c2807c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be24cc55926545a24bb33260d0f7e64e0a066f9f4790db06bfb06b316b32a0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77a9b353f0813e415205c4827d8d22bd8abef028320acc66dda09cea6e86431f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a9b353f0813e415205c4827d8d22bd8abef028320acc66dda09cea6e86431f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Runni
ng\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.030613 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.030658 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.030669 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.030681 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.030689 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:14Z","lastTransitionTime":"2025-12-03T12:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.035065 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:14Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.132698 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.132729 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.132737 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.132748 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.132756 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:14Z","lastTransitionTime":"2025-12-03T12:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.236305 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.236669 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.236687 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.236711 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.236721 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:14Z","lastTransitionTime":"2025-12-03T12:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.338938 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.339009 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.339018 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.339030 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.339039 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:14Z","lastTransitionTime":"2025-12-03T12:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.440475 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.440507 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.440514 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.440526 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.440535 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:14Z","lastTransitionTime":"2025-12-03T12:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.542185 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.542209 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.542217 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.542228 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.542235 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:14Z","lastTransitionTime":"2025-12-03T12:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.644243 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.644270 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.644279 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.644304 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.644313 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:14Z","lastTransitionTime":"2025-12-03T12:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.746183 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.746206 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.746215 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.746226 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.746234 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:14Z","lastTransitionTime":"2025-12-03T12:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.848313 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.848419 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.848427 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.848438 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.848454 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:14Z","lastTransitionTime":"2025-12-03T12:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.856232 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:14 crc kubenswrapper[4849]: E1203 12:22:14.856322 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.950208 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.950243 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.950251 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.950264 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:14 crc kubenswrapper[4849]: I1203 12:22:14.950274 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:14Z","lastTransitionTime":"2025-12-03T12:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.051857 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.051887 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.051895 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.051907 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.051916 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:15Z","lastTransitionTime":"2025-12-03T12:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.154098 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.154127 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.154136 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.154149 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.154158 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:15Z","lastTransitionTime":"2025-12-03T12:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.256062 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.256099 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.256107 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.256119 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.256129 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:15Z","lastTransitionTime":"2025-12-03T12:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.357613 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.357662 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.357671 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.357684 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.357692 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:15Z","lastTransitionTime":"2025-12-03T12:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.459562 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.459601 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.459609 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.459622 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.459631 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:15Z","lastTransitionTime":"2025-12-03T12:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.561359 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.561387 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.561395 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.561406 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.561413 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:15Z","lastTransitionTime":"2025-12-03T12:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.662934 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.663053 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:19.663030642 +0000 UTC m=+146.124878425 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.663166 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.663220 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.663301 4849 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.663310 4849 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.663354 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:23:19.663341477 +0000 UTC m=+146.125189270 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.663372 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 12:23:19.663364961 +0000 UTC m=+146.125212754 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.663478 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.663498 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.663505 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.663517 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.663525 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:15Z","lastTransitionTime":"2025-12-03T12:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.764187 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.764235 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.764361 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.764377 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.764386 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.764407 4849 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.764440 4849 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod 
openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.764390 4849 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.764476 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 12:23:19.764465625 +0000 UTC m=+146.226313418 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.764504 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 12:23:19.764494299 +0000 UTC m=+146.226342092 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.765142 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.765187 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.765196 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.765208 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.765216 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:15Z","lastTransitionTime":"2025-12-03T12:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.856112 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.856136 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.856194 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.856310 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.856395 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:15 crc kubenswrapper[4849]: E1203 12:22:15.856447 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.866873 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.866898 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.866906 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.866916 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.866924 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:15Z","lastTransitionTime":"2025-12-03T12:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.968199 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.968244 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.968252 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.968263 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:15 crc kubenswrapper[4849]: I1203 12:22:15.968271 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:15Z","lastTransitionTime":"2025-12-03T12:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.070050 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.070109 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.070121 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.070133 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.070142 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:16Z","lastTransitionTime":"2025-12-03T12:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.172450 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.172478 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.172488 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.172499 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.172507 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:16Z","lastTransitionTime":"2025-12-03T12:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.274363 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.274406 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.274416 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.274429 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.274438 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:16Z","lastTransitionTime":"2025-12-03T12:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.376316 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.376355 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.376365 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.376377 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.376386 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:16Z","lastTransitionTime":"2025-12-03T12:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.478136 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.478161 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.478169 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.478179 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.478186 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:16Z","lastTransitionTime":"2025-12-03T12:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.580189 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.580222 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.580231 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.580245 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.580255 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:16Z","lastTransitionTime":"2025-12-03T12:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.682067 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.682110 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.682120 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.682133 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.682141 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:16Z","lastTransitionTime":"2025-12-03T12:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.783921 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.784147 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.784211 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.784279 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.784343 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:16Z","lastTransitionTime":"2025-12-03T12:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.856290 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:16 crc kubenswrapper[4849]: E1203 12:22:16.856493 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.885988 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.886013 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.886021 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.886031 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.886038 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:16Z","lastTransitionTime":"2025-12-03T12:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.988927 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.988968 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.988978 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.988990 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:16 crc kubenswrapper[4849]: I1203 12:22:16.989002 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:16Z","lastTransitionTime":"2025-12-03T12:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.091406 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.091427 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.091435 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.091445 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.091452 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:17Z","lastTransitionTime":"2025-12-03T12:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.193176 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.193217 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.193227 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.193238 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.193246 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:17Z","lastTransitionTime":"2025-12-03T12:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.295229 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.295285 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.295295 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.295308 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.295316 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:17Z","lastTransitionTime":"2025-12-03T12:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.396992 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.397020 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.397030 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.397041 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.397049 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:17Z","lastTransitionTime":"2025-12-03T12:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.498772 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.498797 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.498805 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.498814 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.498822 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:17Z","lastTransitionTime":"2025-12-03T12:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.600920 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.601128 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.601206 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.601279 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.601340 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:17Z","lastTransitionTime":"2025-12-03T12:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.703257 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.703288 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.703297 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.703311 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.703319 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:17Z","lastTransitionTime":"2025-12-03T12:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.805624 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.805679 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.805690 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.805703 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.805711 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:17Z","lastTransitionTime":"2025-12-03T12:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.855684 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.855723 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:17 crc kubenswrapper[4849]: E1203 12:22:17.855778 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.855815 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:17 crc kubenswrapper[4849]: E1203 12:22:17.855851 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:17 crc kubenswrapper[4849]: E1203 12:22:17.855912 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.907798 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.907823 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.907833 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.907846 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:17 crc kubenswrapper[4849]: I1203 12:22:17.907854 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:17Z","lastTransitionTime":"2025-12-03T12:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.009075 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.009129 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.009139 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.009152 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.009162 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:18Z","lastTransitionTime":"2025-12-03T12:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.111288 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.111319 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.111346 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.111357 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.111368 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:18Z","lastTransitionTime":"2025-12-03T12:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.213369 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.213401 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.213410 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.213422 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.213431 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:18Z","lastTransitionTime":"2025-12-03T12:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.315808 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.315841 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.315852 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.315863 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.315871 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:18Z","lastTransitionTime":"2025-12-03T12:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.417951 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.417987 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.417994 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.418006 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.418014 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:18Z","lastTransitionTime":"2025-12-03T12:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.519286 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.519314 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.519323 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.519334 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.519341 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:18Z","lastTransitionTime":"2025-12-03T12:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.620587 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.620620 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.620630 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.620663 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.620674 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:18Z","lastTransitionTime":"2025-12-03T12:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.722340 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.722406 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.722415 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.722428 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.722436 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:18Z","lastTransitionTime":"2025-12-03T12:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.824113 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.824142 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.824149 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.824160 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.824168 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:18Z","lastTransitionTime":"2025-12-03T12:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.856367 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:18 crc kubenswrapper[4849]: E1203 12:22:18.856474 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.925615 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.925661 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.925670 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.925682 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:18 crc kubenswrapper[4849]: I1203 12:22:18.925692 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:18Z","lastTransitionTime":"2025-12-03T12:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.027100 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.027130 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.027138 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.027149 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.027158 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:19Z","lastTransitionTime":"2025-12-03T12:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.129041 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.129071 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.129081 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.129102 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.129109 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:19Z","lastTransitionTime":"2025-12-03T12:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.230615 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.230670 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.230680 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.230692 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.230700 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:19Z","lastTransitionTime":"2025-12-03T12:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.332813 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.332858 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.332869 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.332881 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.332889 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:19Z","lastTransitionTime":"2025-12-03T12:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.437022 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.437057 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.437069 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.437082 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.437106 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:19Z","lastTransitionTime":"2025-12-03T12:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.538985 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.539025 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.539035 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.539047 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.539056 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:19Z","lastTransitionTime":"2025-12-03T12:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.640811 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.640853 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.640864 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.640879 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.640888 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:19Z","lastTransitionTime":"2025-12-03T12:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.742778 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.742807 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.742817 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.742828 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.742837 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:19Z","lastTransitionTime":"2025-12-03T12:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.844325 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.844357 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.844368 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.844379 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.844387 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:19Z","lastTransitionTime":"2025-12-03T12:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.856244 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.856271 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.856432 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:19 crc kubenswrapper[4849]: E1203 12:22:19.856513 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:19 crc kubenswrapper[4849]: E1203 12:22:19.856559 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:19 crc kubenswrapper[4849]: E1203 12:22:19.856679 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.856782 4849 scope.go:117] "RemoveContainer" containerID="bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3" Dec 03 12:22:19 crc kubenswrapper[4849]: E1203 12:22:19.856898 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.945563 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.945592 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.945601 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.945612 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:19 crc kubenswrapper[4849]: I1203 12:22:19.945619 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:19Z","lastTransitionTime":"2025-12-03T12:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.047488 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.047523 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.047531 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.047542 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.047550 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:20Z","lastTransitionTime":"2025-12-03T12:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.148887 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.149016 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.149152 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.149276 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.149391 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:20Z","lastTransitionTime":"2025-12-03T12:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.251482 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.251516 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.251524 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.251536 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.251545 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:20Z","lastTransitionTime":"2025-12-03T12:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.353392 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.353417 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.353425 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.353435 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.353443 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:20Z","lastTransitionTime":"2025-12-03T12:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.455340 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.455402 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.455414 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.455448 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.455459 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:20Z","lastTransitionTime":"2025-12-03T12:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.557120 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.557143 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.557151 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.557161 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.557168 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:20Z","lastTransitionTime":"2025-12-03T12:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.659116 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.659151 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.659160 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.659176 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.659184 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:20Z","lastTransitionTime":"2025-12-03T12:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.760871 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.760906 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.760914 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.760928 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.760938 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:20Z","lastTransitionTime":"2025-12-03T12:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.856542 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:20 crc kubenswrapper[4849]: E1203 12:22:20.856628 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.862625 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.862676 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.862686 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.862698 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.862706 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:20Z","lastTransitionTime":"2025-12-03T12:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.963797 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.963833 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.963842 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.963856 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:20 crc kubenswrapper[4849]: I1203 12:22:20.963864 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:20Z","lastTransitionTime":"2025-12-03T12:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.065287 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.065325 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.065336 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.065349 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.065359 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:21Z","lastTransitionTime":"2025-12-03T12:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.167252 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.167274 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.167281 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.167291 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.167298 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:21Z","lastTransitionTime":"2025-12-03T12:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.269126 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.269163 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.269172 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.269184 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.269192 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:21Z","lastTransitionTime":"2025-12-03T12:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.370918 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.370954 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.370962 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.370975 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.370984 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:21Z","lastTransitionTime":"2025-12-03T12:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.472597 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.472625 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.472634 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.472661 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.472670 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:21Z","lastTransitionTime":"2025-12-03T12:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.574194 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.574304 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.574374 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.574505 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.574581 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:21Z","lastTransitionTime":"2025-12-03T12:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.675730 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.675760 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.675768 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.675780 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.675787 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:21Z","lastTransitionTime":"2025-12-03T12:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.777334 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.777365 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.777373 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.777383 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.777389 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:21Z","lastTransitionTime":"2025-12-03T12:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.856548 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.856570 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.856588 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:21 crc kubenswrapper[4849]: E1203 12:22:21.856632 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:21 crc kubenswrapper[4849]: E1203 12:22:21.856719 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:21 crc kubenswrapper[4849]: E1203 12:22:21.856850 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.879632 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.879685 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.879696 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.879709 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.879721 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:21Z","lastTransitionTime":"2025-12-03T12:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.981679 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.981704 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.981711 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.981748 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:21 crc kubenswrapper[4849]: I1203 12:22:21.981756 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:21Z","lastTransitionTime":"2025-12-03T12:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.083689 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.083727 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.083736 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.083750 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.083759 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:22Z","lastTransitionTime":"2025-12-03T12:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.185332 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.185368 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.185378 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.185392 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.185401 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:22Z","lastTransitionTime":"2025-12-03T12:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.287084 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.287123 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.287132 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.287143 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.287150 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:22Z","lastTransitionTime":"2025-12-03T12:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.388994 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.389027 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.389036 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.389050 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.389058 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:22Z","lastTransitionTime":"2025-12-03T12:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.490823 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.490853 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.490861 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.490888 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.490897 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:22Z","lastTransitionTime":"2025-12-03T12:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.592363 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.592393 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.592418 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.592428 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.592436 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:22Z","lastTransitionTime":"2025-12-03T12:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.694499 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.694542 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.694553 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.694567 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.694576 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:22Z","lastTransitionTime":"2025-12-03T12:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.796289 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.796324 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.796334 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.796345 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.796354 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:22Z","lastTransitionTime":"2025-12-03T12:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.855489 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:22 crc kubenswrapper[4849]: E1203 12:22:22.855575 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.897677 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.897697 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.897705 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.897714 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.897721 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:22Z","lastTransitionTime":"2025-12-03T12:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.999496 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.999529 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.999537 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.999550 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:22 crc kubenswrapper[4849]: I1203 12:22:22.999558 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:22Z","lastTransitionTime":"2025-12-03T12:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.101249 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.101295 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.101305 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.101320 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.101329 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.203430 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.203470 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.203482 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.203497 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.203509 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.301890 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.301937 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.301947 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.301957 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.301964 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: E1203 12:22:23.310253 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.312397 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.312418 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.312426 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.312436 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.312442 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: E1203 12:22:23.319804 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.321763 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.321791 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.321798 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.321807 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.321814 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: E1203 12:22:23.328890 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.330865 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.330887 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.330894 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.330902 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.330909 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: E1203 12:22:23.338377 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.340521 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.340552 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.340560 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.340571 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.340579 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: E1203 12:22:23.348286 4849 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a8810eae-d27e-4008-b0f5-39f6de821e7a\\\",\\\"systemUUID\\\":\\\"823dec58-3bc9-4735-a59a-6b887b18964d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: E1203 12:22:23.348387 4849 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.349306 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.349350 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.349359 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.349368 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.349376 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.450625 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.450668 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.450679 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.450689 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.450697 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.552458 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.552484 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.552493 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.552503 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.552510 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.654672 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.654704 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.654713 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.654723 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.654734 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.756941 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.757073 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.757165 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.757225 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.757280 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.855548 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.855703 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.855583 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:23 crc kubenswrapper[4849]: E1203 12:22:23.855896 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:23 crc kubenswrapper[4849]: E1203 12:22:23.856021 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:23 crc kubenswrapper[4849]: E1203 12:22:23.856077 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.858625 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.858725 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.858787 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.858852 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.858905 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.864747 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://231243c558ac247c795ddf887b16f01d090f4be7e9e1238a3d89b6b9ce65e548\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://878249307eb3f1c729b1cd5af13711431da688c876804e1db4b342941c1adfbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.872330 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8fc48a38eaf9aacd647c8efa3983b23bdfe13cac3f4a4ac5727e06a2cb1ed97b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.879198 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-x5bqz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6f3b328-3994-4c31-841d-ea1af43d8326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d41d4c960b3af22c3c86acbe77ee3cc855ab577dc4aa609cd5ab3d89c65482d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brvfs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-x5bqz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.890371 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:22:04Z\\\",\\\"message\\\":\\\"because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:04Z is after 2025-08-24T17:21:41Z]\\\\nI1203 12:22:04.463956 6885 services_controller.go:434] Service openshift-oauth-apiserver/api retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{api openshift-oauth-apiserver 2f1398e4-d2ff-487d-9418-90611e2e40ed 4823 0 2025-02-23 05:22:59 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[app:openshift-oauth-apiserver] map[operator.openshift.io/spec-hash:9c74227d7f96d723d980c50373a5e91f08c5893365bfd5a5040449b1b6585a23 prometheus.io/scheme:https prometheus.io/scrape:true service.alpha.openshift.io/serving-cert-secret-name:serving-cert service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{ap\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:22:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x6m9x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tkrt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.897120 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-2g6w4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d091257-d0f1-4248-b29a-7ce399629cb0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75d40ea11c58b8d8d50a458430770d04b4391b0bc7b51843cc56e3e794674023\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5lx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:13Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-2g6w4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.903158 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8844952d-6213-4001-b613-84f02c2807c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be24cc55926545a24bb33260d0f7e64e0a066f9f4790db06bfb06b316b32a0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://77a9b353f0813e415205c4827d8d22bd8abef028320acc66dda09cea6e86431f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77a9b353f0813e415205c4827d8d22bd8abef028320acc66dda09cea6e86431f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Runni
ng\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.912180 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.918753 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c8htm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hjzzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.926071 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.933018 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d80ee321-2880-456a-9f19-c46cb0ab8128\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d025137ca05e5c13589667194f7b43cdddcbc4a662211895c8fb84859f5f31f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gnjv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hszbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.944901 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5084ec37-b005-407e-8c7f-4528595e1bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63034e99ee64b13bf7c8c36992943eb046a855aa9ac98d5fe1644ad06140e16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f39b9bf741810c7914152dd1e6238cc0f9f33cd1814507756c82a01307b4e039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a53dbcc552c984f611cf31941c015eee405417385f6bf3c1ef826e087705221d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e45f0cc2b402d7a34212cc8b6a41cb4c86589028e5476fe8c6cc89881ca483b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb107fa191b08d79e0df18535f05070ee2c5d95a999643daa6c9cdb2a59829b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"n
ame\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91cd1b52f75576ab927dc7c5cf5660e2e8649db7020205b60216bb03aa2d25c7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3a9b5bc19496e8b30cf2f89fe764f4834300fdf61d92b8a096e7d997e07cf0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b054433a0b10beafe6460165b4673c9c0e6d7a4876fc6c1340bd449ff11f1c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.953401 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11092d44-9365-49cc-a1f7-74b1d12d1750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"203 12:21:11.386203 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 12:21:11.386205 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 12:21:11.386209 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 12:21:11.386506 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1203 12:21:11.389486 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764764455\\\\\\\\\\\\\\\" (2025-12-03 12:20:55 +0000 UTC to 2026-01-02 12:20:56 +0000 UTC (now=2025-12-03 12:21:11.389462809 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389624 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764764471\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764764471\\\\\\\\\\\\\\\" (2025-12-03 11:21:11 +0000 UTC to 2026-12-03 11:21:11 +0000 UTC (now=2025-12-03 12:21:11.38958069 +0000 UTC))\\\\\\\"\\\\nI1203 12:21:11.389658 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1203 12:21:11.389677 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1203 12:21:11.389694 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389709 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1203 12:21:11.389731 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3375497825/tls.crt::/tmp/serving-cert-3375497825/tls.key\\\\\\\"\\\\nF1203 12:21:11.389799 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 12:21:11.389814 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.960025 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"108d7f33-ea04-49bb-946b-fb0041be7b1e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eef3aae1f40c61039c52b7e6d6bb856d3caab399b5cfe936b1c924b3c87fdb4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18b5086d9229e57899f7a5b8ba3759ffa39094337e75b91311759c9d658c0592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8pwc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dbqpv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 
12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.960794 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.960819 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.960828 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.960839 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.960848 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:23Z","lastTransitionTime":"2025-12-03T12:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.967717 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ced91b2-721a-4c30-8321-22dd26c57733\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf34c4a30df2054f3a654fa096b6bbe2f606fc704061821997a7f892c7e400c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c232750c0f8e1a688c74283046b81b8526591fc60ba8d5bbccdc1812134c46ee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a1434b185118d5d876fc174b1d477b879a20f7b86f7272aa4e5c4c35abb143\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.974687 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61deabad-c78e-48a9-85b4-427f288f987e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa19bc0deed4ef04b5eedd9f2c52b31915bac287da48ccbb3a723b1eae85b0ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78b61a64dfc9b6cb90be1936a3d95ef3dff7f347c848ce4403303b5765592e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e967e4cd7a8c3dde7ea58e96f356524c73163c942d7c067e0e37b9c20bbc85b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b245404d98d50a9e6dd86f5a3cb9de6f0da062798a84fe1ec638d9c34bedb508\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:20:54Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:20:53Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.982631 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-2pjsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1b60c35d-f388-49eb-a5d8-09a6cc752575\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:22:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c366bcc45a52929441f0992463074c30100244da02eeb2b1f5e22150b91be24d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T12:21:59Z\\\",\\\"message\\\":\\\"2025-12-03T12:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe\\\\n2025-12-03T12:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d20d24a5-19a1-46da-8e94-cb0cb3dfdebe to /host/opt/cni/bin/\\\\n2025-12-03T12:21:14Z [verbose] multus-daemon started\\\\n2025-12-03T12:21:14Z [verbose] Readiness Indicator file check\\\\n2025-12-03T12:21:59Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:22:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ps8nh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-2pjsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.991077 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f83a1f67-8a6b-4725-8da9-31a7def7be47\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e37a1d04f34bc700593ee50ccd5da499fe4681ca02a20d864b30d76b99c1b3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fda86e63501cd034386877d67dd4d09034e48dc64e31bf9fd142e9f4fc9cb0a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd9044faeb494946b6fa3a2442d3647072463843ecef654e42c88635de00c59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95867d83ca4b4c422724531d1efd7e997b87f2fdcbcdefa331ae0e40beaddb7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3da9e8e71795087726f7ee9ad26cb1b72b9f7c87acd915940233b5d7aed6d252\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1040a239a6bb76430b9a1c619cb81595448bbc5872d70b1b81bf05226090c171\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fa5267e4cdd497ef5f138db90af9caa8f02109a8e83579e55f82755314c151e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T12:21:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T12:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7tkh9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T12:21:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wrlhp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:23 crc kubenswrapper[4849]: I1203 12:22:23.998738 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ffcbbfebf0a0776a7681bbc7bec9207420d65ad788556c060c26b914dca4124b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T12:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:23Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.005949 4849 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T12:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T12:22:24Z is after 2025-08-24T17:21:41Z" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.062521 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.062543 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.062551 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.062562 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.062570 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:24Z","lastTransitionTime":"2025-12-03T12:22:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.163490 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.163516 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.163524 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.163535 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.163543 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:24Z","lastTransitionTime":"2025-12-03T12:22:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.265508 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.265599 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.265699 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.265759 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.265820 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:24Z","lastTransitionTime":"2025-12-03T12:22:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.367928 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.367960 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.367968 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.367982 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.367990 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:24Z","lastTransitionTime":"2025-12-03T12:22:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.470106 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.470127 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.470134 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.470144 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.470151 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:24Z","lastTransitionTime":"2025-12-03T12:22:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.571945 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.571970 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.571978 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.571988 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.571995 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:24Z","lastTransitionTime":"2025-12-03T12:22:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.673959 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.673980 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.673987 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.673996 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.674002 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:24Z","lastTransitionTime":"2025-12-03T12:22:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.775258 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.775288 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.775297 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.775308 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.775317 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:24Z","lastTransitionTime":"2025-12-03T12:22:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.856137 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:24 crc kubenswrapper[4849]: E1203 12:22:24.856223 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.877428 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.877465 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.877474 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.877486 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.877494 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:24Z","lastTransitionTime":"2025-12-03T12:22:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.979463 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.979522 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.979531 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.979541 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:24 crc kubenswrapper[4849]: I1203 12:22:24.979548 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:24Z","lastTransitionTime":"2025-12-03T12:22:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.080931 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.080973 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.080983 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.080997 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.081008 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:25Z","lastTransitionTime":"2025-12-03T12:22:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.182730 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.182762 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.182771 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.182781 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.182790 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:25Z","lastTransitionTime":"2025-12-03T12:22:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.284738 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.284770 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.284778 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.284789 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.284797 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:25Z","lastTransitionTime":"2025-12-03T12:22:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.386475 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.386523 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.386532 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.386544 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.386552 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:25Z","lastTransitionTime":"2025-12-03T12:22:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.488213 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.488246 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.488254 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.488265 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.488273 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:25Z","lastTransitionTime":"2025-12-03T12:22:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.589479 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.589502 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.589509 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.589519 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.589526 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:25Z","lastTransitionTime":"2025-12-03T12:22:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.691009 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.691044 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.691054 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.691066 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.691075 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:25Z","lastTransitionTime":"2025-12-03T12:22:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.792944 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.792988 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.792997 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.793008 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.793016 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:25Z","lastTransitionTime":"2025-12-03T12:22:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.855975 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.855988 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:25 crc kubenswrapper[4849]: E1203 12:22:25.856068 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.856112 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:25 crc kubenswrapper[4849]: E1203 12:22:25.856190 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:25 crc kubenswrapper[4849]: E1203 12:22:25.856258 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.894895 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.894919 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.894928 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.894938 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.894945 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:25Z","lastTransitionTime":"2025-12-03T12:22:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.996137 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.996165 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.996173 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.996182 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:25 crc kubenswrapper[4849]: I1203 12:22:25.996190 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:25Z","lastTransitionTime":"2025-12-03T12:22:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.097565 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.097592 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.097601 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.097610 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.097617 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:26Z","lastTransitionTime":"2025-12-03T12:22:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.199459 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.199505 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.199515 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.199529 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.199538 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:26Z","lastTransitionTime":"2025-12-03T12:22:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.300862 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.300891 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.300898 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.300908 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.300917 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:26Z","lastTransitionTime":"2025-12-03T12:22:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.402305 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.402343 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.402352 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.402366 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.402375 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:26Z","lastTransitionTime":"2025-12-03T12:22:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.504412 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.504444 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.504451 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.504462 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.504469 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:26Z","lastTransitionTime":"2025-12-03T12:22:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.605772 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.605816 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.605824 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.605833 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.605841 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:26Z","lastTransitionTime":"2025-12-03T12:22:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.706872 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.706906 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.706916 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.706952 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.706968 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:26Z","lastTransitionTime":"2025-12-03T12:22:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.808229 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.808264 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.808273 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.808286 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.808295 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:26Z","lastTransitionTime":"2025-12-03T12:22:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.855841 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:26 crc kubenswrapper[4849]: E1203 12:22:26.855924 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.910109 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.910131 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.910139 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.910148 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:26 crc kubenswrapper[4849]: I1203 12:22:26.910154 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:26Z","lastTransitionTime":"2025-12-03T12:22:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.011705 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.011726 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.011734 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.011743 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.011751 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:27Z","lastTransitionTime":"2025-12-03T12:22:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.113084 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.113141 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.113150 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.113161 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.113167 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:27Z","lastTransitionTime":"2025-12-03T12:22:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.214860 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.214967 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.215023 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.215087 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.215158 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:27Z","lastTransitionTime":"2025-12-03T12:22:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.316772 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.316960 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.317046 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.317120 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.317209 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:27Z","lastTransitionTime":"2025-12-03T12:22:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.419262 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.419623 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.419721 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.419797 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.419850 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:27Z","lastTransitionTime":"2025-12-03T12:22:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.522157 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.522193 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.522202 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.522217 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.522226 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:27Z","lastTransitionTime":"2025-12-03T12:22:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.623805 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.623837 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.623855 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.623868 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.623878 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:27Z","lastTransitionTime":"2025-12-03T12:22:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.725057 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.725091 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.725112 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.725125 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.725134 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:27Z","lastTransitionTime":"2025-12-03T12:22:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.827107 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.827142 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.827150 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.827161 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.827172 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:27Z","lastTransitionTime":"2025-12-03T12:22:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.855514 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.855547 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.855553 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:27 crc kubenswrapper[4849]: E1203 12:22:27.855605 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:27 crc kubenswrapper[4849]: E1203 12:22:27.855723 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:27 crc kubenswrapper[4849]: E1203 12:22:27.855815 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.929027 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.929067 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.929075 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.929087 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:27 crc kubenswrapper[4849]: I1203 12:22:27.929102 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:27Z","lastTransitionTime":"2025-12-03T12:22:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.030940 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.030969 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.030979 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.030990 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.030998 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:28Z","lastTransitionTime":"2025-12-03T12:22:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.133570 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.133592 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.133600 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.133610 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.133619 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:28Z","lastTransitionTime":"2025-12-03T12:22:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.235120 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.235158 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.235170 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.235184 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.235193 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:28Z","lastTransitionTime":"2025-12-03T12:22:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.337429 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.337461 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.337471 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.337483 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.337491 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:28Z","lastTransitionTime":"2025-12-03T12:22:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.439224 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.439274 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.439282 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.439294 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.439302 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:28Z","lastTransitionTime":"2025-12-03T12:22:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.540862 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.540889 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.540899 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.540908 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.540934 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:28Z","lastTransitionTime":"2025-12-03T12:22:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.642290 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.642327 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.642335 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.642348 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.642356 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:28Z","lastTransitionTime":"2025-12-03T12:22:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.744546 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.744572 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.744579 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.744591 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.744600 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:28Z","lastTransitionTime":"2025-12-03T12:22:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.846183 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.846214 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.846222 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.846234 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.846241 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:28Z","lastTransitionTime":"2025-12-03T12:22:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.855509 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:28 crc kubenswrapper[4849]: E1203 12:22:28.855747 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.947864 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.947890 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.947898 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.947909 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:28 crc kubenswrapper[4849]: I1203 12:22:28.947917 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:28Z","lastTransitionTime":"2025-12-03T12:22:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.049912 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.049957 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.049968 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.049984 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.049995 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:29Z","lastTransitionTime":"2025-12-03T12:22:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.152084 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.152136 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.152144 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.152161 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.152172 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:29Z","lastTransitionTime":"2025-12-03T12:22:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.254175 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.254244 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.254255 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.254277 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.254291 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:29Z","lastTransitionTime":"2025-12-03T12:22:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.357139 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.357180 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.357189 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.357203 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.357213 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:29Z","lastTransitionTime":"2025-12-03T12:22:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.458932 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.458974 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.458982 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.458997 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.459007 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:29Z","lastTransitionTime":"2025-12-03T12:22:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.474746 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs\") pod \"network-metrics-daemon-hjzzk\" (UID: \"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk"
Dec 03 12:22:29 crc kubenswrapper[4849]: E1203 12:22:29.474848 4849 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 03 12:22:29 crc kubenswrapper[4849]: E1203 12:22:29.474904 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs podName:1cebc8f9-e598-45ce-aed1-4fbd7df7fb86 nodeName:}" failed. No retries permitted until 2025-12-03 12:23:33.474889425 +0000 UTC m=+159.936737208 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs") pod "network-metrics-daemon-hjzzk" (UID: "1cebc8f9-e598-45ce-aed1-4fbd7df7fb86") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.563750 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.563788 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.563889 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.563926 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.564237 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:29Z","lastTransitionTime":"2025-12-03T12:22:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.666663 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.666698 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.666707 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.666719 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.666748 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:29Z","lastTransitionTime":"2025-12-03T12:22:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.768554 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.768590 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.768599 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.768616 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.768623 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:29Z","lastTransitionTime":"2025-12-03T12:22:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.856320 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.856376 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.856332 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:29 crc kubenswrapper[4849]: E1203 12:22:29.856457 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:29 crc kubenswrapper[4849]: E1203 12:22:29.856570 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:29 crc kubenswrapper[4849]: E1203 12:22:29.856660 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.870210 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.870245 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.870255 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.870267 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.870278 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:29Z","lastTransitionTime":"2025-12-03T12:22:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.973017 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.973048 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.973056 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.973068 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:29 crc kubenswrapper[4849]: I1203 12:22:29.973079 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:29Z","lastTransitionTime":"2025-12-03T12:22:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.074927 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.074959 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.074969 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.074982 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.074990 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:30Z","lastTransitionTime":"2025-12-03T12:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.177084 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.177128 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.177135 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.177147 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.177154 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:30Z","lastTransitionTime":"2025-12-03T12:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.279580 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.279617 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.279625 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.279664 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.279673 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:30Z","lastTransitionTime":"2025-12-03T12:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.381770 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.381838 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.381853 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.381876 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.381888 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:30Z","lastTransitionTime":"2025-12-03T12:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.483917 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.483956 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.483967 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.483980 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.483988 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:30Z","lastTransitionTime":"2025-12-03T12:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.585290 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.585322 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.585329 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.585341 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.585351 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:30Z","lastTransitionTime":"2025-12-03T12:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.687185 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.687219 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.687228 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.687239 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.687249 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:30Z","lastTransitionTime":"2025-12-03T12:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.788927 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.788976 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.788985 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.789006 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.789015 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:30Z","lastTransitionTime":"2025-12-03T12:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.855562 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:30 crc kubenswrapper[4849]: E1203 12:22:30.855704 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.890250 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.890280 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.890288 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.890300 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.890309 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:30Z","lastTransitionTime":"2025-12-03T12:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.991751 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.991787 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.991797 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.991829 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:30 crc kubenswrapper[4849]: I1203 12:22:30.991837 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:30Z","lastTransitionTime":"2025-12-03T12:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.093584 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.093616 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.093625 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.093636 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.093660 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:31Z","lastTransitionTime":"2025-12-03T12:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.195720 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.195742 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.195749 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.195759 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.195782 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:31Z","lastTransitionTime":"2025-12-03T12:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.297869 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.297913 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.297922 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.297932 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.297940 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:31Z","lastTransitionTime":"2025-12-03T12:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.399633 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.399681 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.399689 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.399701 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.399708 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:31Z","lastTransitionTime":"2025-12-03T12:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.501412 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.501444 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.501451 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.501462 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.501470 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:31Z","lastTransitionTime":"2025-12-03T12:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.603334 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.603381 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.603390 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.603402 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.603409 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:31Z","lastTransitionTime":"2025-12-03T12:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.705068 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.705119 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.705129 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.705140 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.705148 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:31Z","lastTransitionTime":"2025-12-03T12:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.806386 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.806442 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.806451 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.806461 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.806468 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:31Z","lastTransitionTime":"2025-12-03T12:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.856502 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:31 crc kubenswrapper[4849]: E1203 12:22:31.856588 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.856625 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.856670 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:31 crc kubenswrapper[4849]: E1203 12:22:31.856712 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:31 crc kubenswrapper[4849]: E1203 12:22:31.856819 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.908143 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.908186 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.908196 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.908206 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:31 crc kubenswrapper[4849]: I1203 12:22:31.908213 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:31Z","lastTransitionTime":"2025-12-03T12:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.009575 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.009604 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.009611 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.009622 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.009629 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:32Z","lastTransitionTime":"2025-12-03T12:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.112276 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.112313 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.112324 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.112336 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.112347 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:32Z","lastTransitionTime":"2025-12-03T12:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.214207 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.214236 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.214244 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.214255 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.214262 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:32Z","lastTransitionTime":"2025-12-03T12:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.316376 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.316415 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.316425 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.316440 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.316451 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:32Z","lastTransitionTime":"2025-12-03T12:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.418056 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.418088 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.418110 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.418125 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.418134 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:32Z","lastTransitionTime":"2025-12-03T12:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.519772 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.519797 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.519805 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.519815 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.519823 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:32Z","lastTransitionTime":"2025-12-03T12:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.621355 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.621375 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.621382 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.621391 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.621398 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:32Z","lastTransitionTime":"2025-12-03T12:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.723156 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.723187 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.723195 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.723206 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.723213 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:32Z","lastTransitionTime":"2025-12-03T12:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.824678 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.824704 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.824711 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.824720 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.824727 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:32Z","lastTransitionTime":"2025-12-03T12:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.855533 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:32 crc kubenswrapper[4849]: E1203 12:22:32.855625 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.856135 4849 scope.go:117] "RemoveContainer" containerID="bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3" Dec 03 12:22:32 crc kubenswrapper[4849]: E1203 12:22:32.856248 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tkrt4_openshift-ovn-kubernetes(3d8dd3fd-f66b-4e40-a41b-e444e5e8b677)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.926691 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.926714 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.926722 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.926731 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:32 crc kubenswrapper[4849]: I1203 12:22:32.926738 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:32Z","lastTransitionTime":"2025-12-03T12:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.028246 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.028271 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.028278 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.028289 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.028296 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:33Z","lastTransitionTime":"2025-12-03T12:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.129455 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.129492 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.129501 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.129513 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.129521 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:33Z","lastTransitionTime":"2025-12-03T12:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.231115 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.231146 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.231153 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.231167 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.231175 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:33Z","lastTransitionTime":"2025-12-03T12:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.333011 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.333049 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.333058 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.333070 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.333080 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:33Z","lastTransitionTime":"2025-12-03T12:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.399960 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.399996 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.400007 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.400019 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.400027 4849 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T12:22:33Z","lastTransitionTime":"2025-12-03T12:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.437302 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744"] Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.437587 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.438894 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.439059 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.439152 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.440355 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.449791 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-2pjsx" podStartSLOduration=82.449782294 podStartE2EDuration="1m22.449782294s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:22:33.449408431 +0000 UTC m=+99.911256213" watchObservedRunningTime="2025-12-03 12:22:33.449782294 +0000 UTC m=+99.911630076" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.470336 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-wrlhp" podStartSLOduration=82.470317279 podStartE2EDuration="1m22.470317279s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:22:33.461745485 +0000 UTC m=+99.923593269" watchObservedRunningTime="2025-12-03 12:22:33.470317279 +0000 UTC m=+99.932165061" Dec 03 12:22:33 crc 
kubenswrapper[4849]: I1203 12:22:33.500461 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-x5bqz" podStartSLOduration=82.500447071 podStartE2EDuration="1m22.500447071s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:22:33.500231396 +0000 UTC m=+99.962079179" watchObservedRunningTime="2025-12-03 12:22:33.500447071 +0000 UTC m=+99.962294854" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.508430 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e756547b-5e25-4547-888b-2cf63a7f08ec-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.508470 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e756547b-5e25-4547-888b-2cf63a7f08ec-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.508488 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e756547b-5e25-4547-888b-2cf63a7f08ec-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.508506 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e756547b-5e25-4547-888b-2cf63a7f08ec-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.508521 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e756547b-5e25-4547-888b-2cf63a7f08ec-service-ca\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.525930 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-2g6w4" podStartSLOduration=82.525916888 podStartE2EDuration="1m22.525916888s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:22:33.525838361 +0000 UTC m=+99.987686144" watchObservedRunningTime="2025-12-03 12:22:33.525916888 +0000 UTC m=+99.987764671" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.539793 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=22.539779803 podStartE2EDuration="22.539779803s" podCreationTimestamp="2025-12-03 12:22:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:22:33.538955593 +0000 UTC m=+100.000803366" watchObservedRunningTime="2025-12-03 12:22:33.539779803 +0000 UTC m=+100.001627587" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.583940 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podStartSLOduration=82.583926892 podStartE2EDuration="1m22.583926892s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:22:33.568212122 +0000 UTC m=+100.030059905" watchObservedRunningTime="2025-12-03 12:22:33.583926892 +0000 UTC m=+100.045774665" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.584150 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=81.584146645 podStartE2EDuration="1m21.584146645s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:22:33.583903969 +0000 UTC m=+100.045751751" watchObservedRunningTime="2025-12-03 12:22:33.584146645 +0000 UTC m=+100.045994428" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.594835 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=81.594827054 podStartE2EDuration="1m21.594827054s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:22:33.594172684 +0000 UTC m=+100.056020466" watchObservedRunningTime="2025-12-03 12:22:33.594827054 +0000 UTC m=+100.056674837" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.601860 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dbqpv" podStartSLOduration=81.601851235 podStartE2EDuration="1m21.601851235s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:22:33.601741089 +0000 UTC m=+100.063588872" watchObservedRunningTime="2025-12-03 12:22:33.601851235 +0000 UTC m=+100.063699019" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.609012 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e756547b-5e25-4547-888b-2cf63a7f08ec-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.609046 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e756547b-5e25-4547-888b-2cf63a7f08ec-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: 
\"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.609065 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e756547b-5e25-4547-888b-2cf63a7f08ec-service-ca\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.609128 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e756547b-5e25-4547-888b-2cf63a7f08ec-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.609145 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e756547b-5e25-4547-888b-2cf63a7f08ec-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.609151 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e756547b-5e25-4547-888b-2cf63a7f08ec-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.609148 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e756547b-5e25-4547-888b-2cf63a7f08ec-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.609910 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e756547b-5e25-4547-888b-2cf63a7f08ec-service-ca\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.613303 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e756547b-5e25-4547-888b-2cf63a7f08ec-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.620942 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=45.620927757 podStartE2EDuration="45.620927757s" podCreationTimestamp="2025-12-03 12:21:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 
12:22:33.620504552 +0000 UTC m=+100.082352335" watchObservedRunningTime="2025-12-03 12:22:33.620927757 +0000 UTC m=+100.082775541" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.621175 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=79.621171376 podStartE2EDuration="1m19.621171376s" podCreationTimestamp="2025-12-03 12:21:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:22:33.6127097 +0000 UTC m=+100.074557484" watchObservedRunningTime="2025-12-03 12:22:33.621171376 +0000 UTC m=+100.083019160" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.622834 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e756547b-5e25-4547-888b-2cf63a7f08ec-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-pf744\" (UID: \"e756547b-5e25-4547-888b-2cf63a7f08ec\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.747347 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.855877 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.855890 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:33 crc kubenswrapper[4849]: I1203 12:22:33.855917 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:33 crc kubenswrapper[4849]: E1203 12:22:33.856663 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:33 crc kubenswrapper[4849]: E1203 12:22:33.856730 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:33 crc kubenswrapper[4849]: E1203 12:22:33.856840 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:34 crc kubenswrapper[4849]: I1203 12:22:34.158117 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" event={"ID":"e756547b-5e25-4547-888b-2cf63a7f08ec","Type":"ContainerStarted","Data":"f169d810677d7a4539ce53b8117d431512dd6178ae58594fd26346ee9546ae44"} Dec 03 12:22:34 crc kubenswrapper[4849]: I1203 12:22:34.158159 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" event={"ID":"e756547b-5e25-4547-888b-2cf63a7f08ec","Type":"ContainerStarted","Data":"831235b193de471d593214d56f91cbf18127eef764302d4602a57713382cfc3b"} Dec 03 12:22:34 crc kubenswrapper[4849]: I1203 12:22:34.168756 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pf744" podStartSLOduration=83.168741032 podStartE2EDuration="1m23.168741032s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:22:34.168279584 +0000 UTC m=+100.630127367" watchObservedRunningTime="2025-12-03 12:22:34.168741032 +0000 UTC m=+100.630588816" Dec 03 12:22:34 crc kubenswrapper[4849]: I1203 12:22:34.856446 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:34 crc kubenswrapper[4849]: E1203 12:22:34.856548 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:35 crc kubenswrapper[4849]: I1203 12:22:35.856145 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:35 crc kubenswrapper[4849]: I1203 12:22:35.856218 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:35 crc kubenswrapper[4849]: I1203 12:22:35.856237 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:35 crc kubenswrapper[4849]: E1203 12:22:35.856325 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:35 crc kubenswrapper[4849]: E1203 12:22:35.856430 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:35 crc kubenswrapper[4849]: E1203 12:22:35.856469 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:36 crc kubenswrapper[4849]: I1203 12:22:36.856539 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:36 crc kubenswrapper[4849]: E1203 12:22:36.856691 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:37 crc kubenswrapper[4849]: I1203 12:22:37.855721 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:37 crc kubenswrapper[4849]: I1203 12:22:37.855811 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:37 crc kubenswrapper[4849]: E1203 12:22:37.855900 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:37 crc kubenswrapper[4849]: I1203 12:22:37.855923 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:37 crc kubenswrapper[4849]: E1203 12:22:37.856100 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:37 crc kubenswrapper[4849]: E1203 12:22:37.856148 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:38 crc kubenswrapper[4849]: I1203 12:22:38.855870 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:38 crc kubenswrapper[4849]: E1203 12:22:38.855969 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:39 crc kubenswrapper[4849]: I1203 12:22:39.856359 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:39 crc kubenswrapper[4849]: E1203 12:22:39.856434 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:39 crc kubenswrapper[4849]: I1203 12:22:39.856466 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:39 crc kubenswrapper[4849]: E1203 12:22:39.856556 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:39 crc kubenswrapper[4849]: I1203 12:22:39.856472 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:39 crc kubenswrapper[4849]: E1203 12:22:39.856672 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:40 crc kubenswrapper[4849]: I1203 12:22:40.856188 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:40 crc kubenswrapper[4849]: E1203 12:22:40.856314 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:41 crc kubenswrapper[4849]: I1203 12:22:41.856097 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:41 crc kubenswrapper[4849]: E1203 12:22:41.856191 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:41 crc kubenswrapper[4849]: I1203 12:22:41.856097 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:41 crc kubenswrapper[4849]: E1203 12:22:41.856376 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:41 crc kubenswrapper[4849]: I1203 12:22:41.856387 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:41 crc kubenswrapper[4849]: E1203 12:22:41.856461 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:42 crc kubenswrapper[4849]: I1203 12:22:42.856269 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:42 crc kubenswrapper[4849]: E1203 12:22:42.856366 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:43 crc kubenswrapper[4849]: I1203 12:22:43.855835 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:43 crc kubenswrapper[4849]: I1203 12:22:43.856039 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:43 crc kubenswrapper[4849]: I1203 12:22:43.856735 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:43 crc kubenswrapper[4849]: E1203 12:22:43.857103 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:43 crc kubenswrapper[4849]: E1203 12:22:43.856832 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:43 crc kubenswrapper[4849]: E1203 12:22:43.856739 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:44 crc kubenswrapper[4849]: I1203 12:22:44.856022 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:44 crc kubenswrapper[4849]: E1203 12:22:44.856303 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:44 crc kubenswrapper[4849]: I1203 12:22:44.856863 4849 scope.go:117] "RemoveContainer" containerID="bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3" Dec 03 12:22:45 crc kubenswrapper[4849]: I1203 12:22:45.183038 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/3.log" Dec 03 12:22:45 crc kubenswrapper[4849]: I1203 12:22:45.185093 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerStarted","Data":"07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0"} Dec 03 12:22:45 crc kubenswrapper[4849]: I1203 12:22:45.185438 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:22:45 crc kubenswrapper[4849]: I1203 12:22:45.209750 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podStartSLOduration=93.209738142 podStartE2EDuration="1m33.209738142s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:22:45.209171416 +0000 UTC m=+111.671019199" watchObservedRunningTime="2025-12-03 12:22:45.209738142 +0000 UTC m=+111.671585925" Dec 03 12:22:45 crc kubenswrapper[4849]: I1203 12:22:45.465261 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-hjzzk"] Dec 03 12:22:45 crc kubenswrapper[4849]: I1203 12:22:45.465349 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:45 crc kubenswrapper[4849]: E1203 12:22:45.465416 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:45 crc kubenswrapper[4849]: I1203 12:22:45.855613 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:45 crc kubenswrapper[4849]: I1203 12:22:45.855613 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:45 crc kubenswrapper[4849]: E1203 12:22:45.855718 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:45 crc kubenswrapper[4849]: I1203 12:22:45.855771 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:45 crc kubenswrapper[4849]: E1203 12:22:45.855829 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:45 crc kubenswrapper[4849]: E1203 12:22:45.855891 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:46 crc kubenswrapper[4849]: I1203 12:22:46.188772 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pjsx_1b60c35d-f388-49eb-a5d8-09a6cc752575/kube-multus/1.log" Dec 03 12:22:46 crc kubenswrapper[4849]: I1203 12:22:46.189076 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pjsx_1b60c35d-f388-49eb-a5d8-09a6cc752575/kube-multus/0.log" Dec 03 12:22:46 crc kubenswrapper[4849]: I1203 12:22:46.189105 4849 generic.go:334] "Generic (PLEG): container finished" podID="1b60c35d-f388-49eb-a5d8-09a6cc752575" containerID="c366bcc45a52929441f0992463074c30100244da02eeb2b1f5e22150b91be24d" exitCode=1 Dec 03 12:22:46 crc kubenswrapper[4849]: I1203 12:22:46.189202 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pjsx" event={"ID":"1b60c35d-f388-49eb-a5d8-09a6cc752575","Type":"ContainerDied","Data":"c366bcc45a52929441f0992463074c30100244da02eeb2b1f5e22150b91be24d"} Dec 03 12:22:46 crc kubenswrapper[4849]: I1203 12:22:46.189234 4849 scope.go:117] "RemoveContainer" containerID="336e904c5d2c8f52d601cf067caed1cdeda243b9206d7d8697b55ac2b1b2f5a9" Dec 03 12:22:46 crc kubenswrapper[4849]: I1203 12:22:46.189539 4849 scope.go:117] "RemoveContainer" containerID="c366bcc45a52929441f0992463074c30100244da02eeb2b1f5e22150b91be24d" Dec 03 12:22:46 crc kubenswrapper[4849]: E1203 12:22:46.189704 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-2pjsx_openshift-multus(1b60c35d-f388-49eb-a5d8-09a6cc752575)\"" pod="openshift-multus/multus-2pjsx" podUID="1b60c35d-f388-49eb-a5d8-09a6cc752575" Dec 03 12:22:47 crc kubenswrapper[4849]: I1203 12:22:47.192383 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pjsx_1b60c35d-f388-49eb-a5d8-09a6cc752575/kube-multus/1.log" Dec 03 12:22:47 crc kubenswrapper[4849]: I1203 12:22:47.856260 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:47 crc kubenswrapper[4849]: I1203 12:22:47.856302 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:47 crc kubenswrapper[4849]: I1203 12:22:47.856271 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:47 crc kubenswrapper[4849]: E1203 12:22:47.856395 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:47 crc kubenswrapper[4849]: E1203 12:22:47.856470 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:47 crc kubenswrapper[4849]: E1203 12:22:47.856524 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:47 crc kubenswrapper[4849]: I1203 12:22:47.856597 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:47 crc kubenswrapper[4849]: E1203 12:22:47.856737 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:49 crc kubenswrapper[4849]: I1203 12:22:49.856369 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:49 crc kubenswrapper[4849]: E1203 12:22:49.856897 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:49 crc kubenswrapper[4849]: I1203 12:22:49.856486 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:49 crc kubenswrapper[4849]: I1203 12:22:49.856408 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:49 crc kubenswrapper[4849]: I1203 12:22:49.856439 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:49 crc kubenswrapper[4849]: E1203 12:22:49.857222 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:49 crc kubenswrapper[4849]: E1203 12:22:49.857307 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:49 crc kubenswrapper[4849]: E1203 12:22:49.857361 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:51 crc kubenswrapper[4849]: I1203 12:22:51.855603 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:51 crc kubenswrapper[4849]: I1203 12:22:51.855629 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:51 crc kubenswrapper[4849]: I1203 12:22:51.855660 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:51 crc kubenswrapper[4849]: E1203 12:22:51.855757 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:51 crc kubenswrapper[4849]: I1203 12:22:51.855768 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:51 crc kubenswrapper[4849]: E1203 12:22:51.855833 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:51 crc kubenswrapper[4849]: E1203 12:22:51.855872 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:51 crc kubenswrapper[4849]: E1203 12:22:51.855925 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:53 crc kubenswrapper[4849]: I1203 12:22:53.856045 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:53 crc kubenswrapper[4849]: I1203 12:22:53.856059 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:53 crc kubenswrapper[4849]: I1203 12:22:53.856058 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:53 crc kubenswrapper[4849]: E1203 12:22:53.856882 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:53 crc kubenswrapper[4849]: I1203 12:22:53.856923 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:53 crc kubenswrapper[4849]: E1203 12:22:53.857009 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:53 crc kubenswrapper[4849]: E1203 12:22:53.857068 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:53 crc kubenswrapper[4849]: E1203 12:22:53.857147 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:53 crc kubenswrapper[4849]: E1203 12:22:53.895092 4849 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 03 12:22:53 crc kubenswrapper[4849]: E1203 12:22:53.910896 4849 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:22:55 crc kubenswrapper[4849]: I1203 12:22:55.856223 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:55 crc kubenswrapper[4849]: I1203 12:22:55.856264 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:55 crc kubenswrapper[4849]: I1203 12:22:55.856298 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:55 crc kubenswrapper[4849]: E1203 12:22:55.856326 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:55 crc kubenswrapper[4849]: I1203 12:22:55.856350 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:55 crc kubenswrapper[4849]: E1203 12:22:55.856420 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:55 crc kubenswrapper[4849]: E1203 12:22:55.856471 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:55 crc kubenswrapper[4849]: E1203 12:22:55.856525 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:57 crc kubenswrapper[4849]: I1203 12:22:57.855931 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:57 crc kubenswrapper[4849]: I1203 12:22:57.855969 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:57 crc kubenswrapper[4849]: I1203 12:22:57.855971 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:57 crc kubenswrapper[4849]: E1203 12:22:57.856030 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:57 crc kubenswrapper[4849]: I1203 12:22:57.856040 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:57 crc kubenswrapper[4849]: E1203 12:22:57.856132 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:22:57 crc kubenswrapper[4849]: E1203 12:22:57.856210 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:57 crc kubenswrapper[4849]: E1203 12:22:57.856247 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:58 crc kubenswrapper[4849]: E1203 12:22:58.911634 4849 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:22:59 crc kubenswrapper[4849]: I1203 12:22:59.855753 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:22:59 crc kubenswrapper[4849]: I1203 12:22:59.855802 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:22:59 crc kubenswrapper[4849]: I1203 12:22:59.855816 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:22:59 crc kubenswrapper[4849]: I1203 12:22:59.855762 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:22:59 crc kubenswrapper[4849]: E1203 12:22:59.855870 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:22:59 crc kubenswrapper[4849]: E1203 12:22:59.855933 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:22:59 crc kubenswrapper[4849]: E1203 12:22:59.856007 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:22:59 crc kubenswrapper[4849]: E1203 12:22:59.856100 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:23:01 crc kubenswrapper[4849]: I1203 12:23:01.855936 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:23:01 crc kubenswrapper[4849]: I1203 12:23:01.855979 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:23:01 crc kubenswrapper[4849]: E1203 12:23:01.856037 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:23:01 crc kubenswrapper[4849]: I1203 12:23:01.856063 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:23:01 crc kubenswrapper[4849]: I1203 12:23:01.856067 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:23:01 crc kubenswrapper[4849]: E1203 12:23:01.856250 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:23:01 crc kubenswrapper[4849]: E1203 12:23:01.856329 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:23:01 crc kubenswrapper[4849]: E1203 12:23:01.856386 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:23:01 crc kubenswrapper[4849]: I1203 12:23:01.856407 4849 scope.go:117] "RemoveContainer" containerID="c366bcc45a52929441f0992463074c30100244da02eeb2b1f5e22150b91be24d" Dec 03 12:23:02 crc kubenswrapper[4849]: I1203 12:23:02.226924 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pjsx_1b60c35d-f388-49eb-a5d8-09a6cc752575/kube-multus/1.log" Dec 03 12:23:02 crc kubenswrapper[4849]: I1203 12:23:02.227340 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pjsx" event={"ID":"1b60c35d-f388-49eb-a5d8-09a6cc752575","Type":"ContainerStarted","Data":"ad36464d1d23439200024871f7f6e4beba4ab7b967317c2f0c2373d670d27098"} Dec 03 12:23:03 crc kubenswrapper[4849]: I1203 12:23:03.855670 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:23:03 crc kubenswrapper[4849]: I1203 12:23:03.855671 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:23:03 crc kubenswrapper[4849]: I1203 12:23:03.855730 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:23:03 crc kubenswrapper[4849]: E1203 12:23:03.856602 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 12:23:03 crc kubenswrapper[4849]: I1203 12:23:03.856660 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:23:03 crc kubenswrapper[4849]: E1203 12:23:03.856734 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 12:23:03 crc kubenswrapper[4849]: E1203 12:23:03.856851 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hjzzk" podUID="1cebc8f9-e598-45ce-aed1-4fbd7df7fb86" Dec 03 12:23:03 crc kubenswrapper[4849]: E1203 12:23:03.856970 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.205113 4849 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.225475 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xk97z"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.225821 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.227056 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.227402 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.227456 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.227479 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.227793 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27bd0071-f571-4860-b882-360fbb4fd181-serving-cert\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.227879 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-client-ca\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.227907 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-config\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.227925 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.227940 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tqt6\" (UniqueName: \"kubernetes.io/projected/27bd0071-f571-4860-b882-360fbb4fd181-kube-api-access-5tqt6\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.228802 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.228858 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-zhk4k"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.229233 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.229336 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.229776 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.230162 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.230605 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-jcr69"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.231005 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-jcr69" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.231686 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.232006 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.233428 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.233534 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.233544 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.233633 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.233828 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.233853 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.235363 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-5jm84"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.235759 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.235794 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.236002 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-trkjr"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.242821 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.249223 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.249589 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.249745 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.249771 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.249927 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.250236 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.251391 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.251531 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.251560 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.251745 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.251807 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.251923 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.252584 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.252789 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.252900 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.252988 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.253136 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.253238 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.253359 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.253473 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.254535 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.254598 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.254684 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.254716 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.254870 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.254941 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.254974 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.255049 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.255098 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.255195 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.255210 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.255197 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.255284 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.255333 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.255707 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr"] Dec 03 
12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.256084 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.257718 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.257852 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.259415 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.259546 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-f8svg"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.259874 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.260586 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.260932 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.261016 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-x6dkl"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.261412 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.262195 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-z6qrp"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.262498 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.263009 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.263087 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.263216 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.263288 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.263402 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.263623 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.264490 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.264602 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.264772 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.264858 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.264986 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.265322 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.265389 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.265800 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zlm6p"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.265851 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.266084 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.266250 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.266527 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.267205 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.267546 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8lbfv"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.267613 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.268037 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.268950 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.269289 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.270432 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.270635 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.274009 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.282691 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.285263 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8ddts"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.285412 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.286300 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.286570 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8ddts" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.291772 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.291931 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292232 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292342 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292371 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292381 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292463 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-657zm"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292484 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292491 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292528 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292557 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292590 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292629 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292653 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292697 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292705 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292494 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292731 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292738 4849 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292745 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292787 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292822 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.292903 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.293414 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.293527 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.293625 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.293750 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.293800 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.293847 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.293894 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.293974 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.294336 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.296526 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-zdq4x"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.296729 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-657zm" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.297799 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.299144 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.300460 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.303759 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.303942 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.305629 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.306184 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-rv7bj"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.306741 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.307108 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.307705 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.307779 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.307795 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rv7bj" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.307716 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.307860 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.307890 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.308416 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.308626 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.309042 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.309100 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.309459 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xk97z"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.309479 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.309842 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.309979 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.311179 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.312140 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.312715 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.325533 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.326033 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z2mdp"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.327023 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.327415 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.327432 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.327843 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.329150 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d3de6981-1f82-494a-b652-59c9677791b5-etcd-client\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.329195 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-client-ca\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.329238 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-config\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.329257 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.329279 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3de6981-1f82-494a-b652-59c9677791b5-serving-cert\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.329300 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tqt6\" (UniqueName: \"kubernetes.io/projected/27bd0071-f571-4860-b882-360fbb4fd181-kube-api-access-5tqt6\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.329335 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27bd0071-f571-4860-b882-360fbb4fd181-serving-cert\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.329355 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d3de6981-1f82-494a-b652-59c9677791b5-etcd-ca\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.329377 4849 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smdsr\" (UniqueName: \"kubernetes.io/projected/d3de6981-1f82-494a-b652-59c9677791b5-kube-api-access-smdsr\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.329422 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3de6981-1f82-494a-b652-59c9677791b5-config\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.329443 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d3de6981-1f82-494a-b652-59c9677791b5-etcd-service-ca\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.331603 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-zjlhp"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.332215 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.332930 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-client-ca\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.333239 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.334270 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.337574 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.337732 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.337946 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.338230 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.338266 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.338281 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-jcr69"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.338321 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.338927 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-zhk4k"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.339816 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-config\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.340390 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-gct6s"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.340899 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-gct6s" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.341921 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.342824 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-trkjr"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.343772 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.344373 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.345207 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.347006 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.347033 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.347676 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.348374 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8ddts"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.348573 4849 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.348877 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27bd0071-f571-4860-b882-360fbb4fd181-serving-cert\") pod \"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.349443 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.350094 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.354550 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-x6dkl"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.355472 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.356336 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-f8svg"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.358324 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.359101 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.359881 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.360677 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8lbfv"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.361476 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-z6qrp"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.362298 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-rv7bj"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.363088 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.363965 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.364760 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.365522 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-657zm"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.366316 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56"] Dec 03 
12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.366901 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.367139 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-5jm84"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.367962 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zlm6p"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.368780 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.369571 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z2mdp"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.370466 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.371372 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-gct6s"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.372345 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.373111 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-zjlhp"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.373885 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-w7xvk"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.377777 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-cfwmz"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.378364 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-w7xvk" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.380434 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-w7xvk"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.380524 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-cfwmz"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.380544 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.387453 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.407021 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.427716 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430523 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-z2mdp\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430554 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3de6981-1f82-494a-b652-59c9677791b5-serving-cert\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430577 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dedca0f9-b035-4d2d-bb74-85c016290a92-serving-cert\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430593 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj7mw\" (UniqueName: \"kubernetes.io/projected/7eca320e-67e3-4f03-92ff-8a79363ca7ad-kube-api-access-vj7mw\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430631 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52c7a701-2169-4dbc-ba2f-62e40b8b4450-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-m45jg\" (UID: \"52c7a701-2169-4dbc-ba2f-62e40b8b4450\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430663 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnz58\" (UniqueName: \"kubernetes.io/projected/0ef2f4fb-55d8-4502-be08-aa6296dc8dd7-kube-api-access-tnz58\") pod \"openshift-apiserver-operator-796bbdcf4f-hx6f9\" (UID: \"0ef2f4fb-55d8-4502-be08-aa6296dc8dd7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430679 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/4b549740-1c07-490a-be84-a135fc13c554-config\") pod \"kube-apiserver-operator-766d6c64bb-67sg6\" (UID: \"4b549740-1c07-490a-be84-a135fc13c554\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430693 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fddbd3be-962c-4043-9b61-8954a0a4c796-profile-collector-cert\") pod \"catalog-operator-68c6474976-zpgwb\" (UID: \"fddbd3be-962c-4043-9b61-8954a0a4c796\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430710 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wr4f\" (UniqueName: \"kubernetes.io/projected/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-kube-api-access-7wr4f\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430746 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smdsr\" (UniqueName: \"kubernetes.io/projected/d3de6981-1f82-494a-b652-59c9677791b5-kube-api-access-smdsr\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430761 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dedca0f9-b035-4d2d-bb74-85c016290a92-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430774 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7eca320e-67e3-4f03-92ff-8a79363ca7ad-stats-auth\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430792 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d3de6981-1f82-494a-b652-59c9677791b5-etcd-service-ca\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430807 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cef45b47-894e-4bfe-82ff-352085dbf93e-config\") pod \"machine-api-operator-5694c8668f-trkjr\" (UID: \"cef45b47-894e-4bfe-82ff-352085dbf93e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430821 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrdbd\" (UniqueName: \"kubernetes.io/projected/73126b70-0c81-4c61-a395-8aa33f0128a8-kube-api-access-zrdbd\") pod \"dns-operator-744455d44c-8ddts\" (UID: 
\"73126b70-0c81-4c61-a395-8aa33f0128a8\") " pod="openshift-dns-operator/dns-operator-744455d44c-8ddts" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430836 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d3de6981-1f82-494a-b652-59c9677791b5-etcd-client\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430850 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7eca320e-67e3-4f03-92ff-8a79363ca7ad-service-ca-bundle\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430873 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b27af4b9-6198-4f4e-b269-2faed33e9dad-bound-sa-token\") pod \"ingress-operator-5b745b69d9-vstm9\" (UID: \"b27af4b9-6198-4f4e-b269-2faed33e9dad\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430891 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/73126b70-0c81-4c61-a395-8aa33f0128a8-metrics-tls\") pod \"dns-operator-744455d44c-8ddts\" (UID: \"73126b70-0c81-4c61-a395-8aa33f0128a8\") " pod="openshift-dns-operator/dns-operator-744455d44c-8ddts" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430922 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpwl9\" (UniqueName: \"kubernetes.io/projected/f3fd73ed-3b8f-4cc1-893b-49e816d12386-kube-api-access-vpwl9\") pod \"kube-storage-version-migrator-operator-b67b599dd-97cvl\" (UID: \"f3fd73ed-3b8f-4cc1-893b-49e816d12386\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430936 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dedca0f9-b035-4d2d-bb74-85c016290a92-audit-dir\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430950 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-config\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430966 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b549740-1c07-490a-be84-a135fc13c554-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-67sg6\" (UID: \"4b549740-1c07-490a-be84-a135fc13c554\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" 
Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.430980 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e-trusted-ca\") pod \"console-operator-58897d9998-zhk4k\" (UID: \"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e\") " pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431080 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b27af4b9-6198-4f4e-b269-2faed33e9dad-metrics-tls\") pod \"ingress-operator-5b745b69d9-vstm9\" (UID: \"b27af4b9-6198-4f4e-b269-2faed33e9dad\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431096 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e-config\") pod \"console-operator-58897d9998-zhk4k\" (UID: \"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e\") " pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431112 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj6jv\" (UniqueName: \"kubernetes.io/projected/dedca0f9-b035-4d2d-bb74-85c016290a92-kube-api-access-vj6jv\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431149 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkxzp\" (UniqueName: \"kubernetes.io/projected/9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e-kube-api-access-qkxzp\") pod \"console-operator-58897d9998-zhk4k\" (UID: \"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e\") " pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431165 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7eca320e-67e3-4f03-92ff-8a79363ca7ad-default-certificate\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431183 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bchq\" (UniqueName: \"kubernetes.io/projected/68d328a7-6916-41a1-b3df-f21a1635c000-kube-api-access-8bchq\") pod \"cluster-samples-operator-665b6dd947-f9nlf\" (UID: \"68d328a7-6916-41a1-b3df-f21a1635c000\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431197 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dedca0f9-b035-4d2d-bb74-85c016290a92-etcd-client\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431212 4849 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/dedca0f9-b035-4d2d-bb74-85c016290a92-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431226 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-service-ca\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431240 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-oauth-config\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431299 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-trusted-ca-bundle\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431386 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-oauth-serving-cert\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431428 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/cef45b47-894e-4bfe-82ff-352085dbf93e-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-trkjr\" (UID: \"cef45b47-894e-4bfe-82ff-352085dbf93e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431453 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e-serving-cert\") pod \"console-operator-58897d9998-zhk4k\" (UID: \"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e\") " pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431454 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d3de6981-1f82-494a-b652-59c9677791b5-etcd-service-ca\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431480 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kfq6\" (UniqueName: 
\"kubernetes.io/projected/fddbd3be-962c-4043-9b61-8954a0a4c796-kube-api-access-9kfq6\") pod \"catalog-operator-68c6474976-zpgwb\" (UID: \"fddbd3be-962c-4043-9b61-8954a0a4c796\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431496 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b549740-1c07-490a-be84-a135fc13c554-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-67sg6\" (UID: \"4b549740-1c07-490a-be84-a135fc13c554\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431511 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52c7a701-2169-4dbc-ba2f-62e40b8b4450-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-m45jg\" (UID: \"52c7a701-2169-4dbc-ba2f-62e40b8b4450\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431524 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fddbd3be-962c-4043-9b61-8954a0a4c796-srv-cert\") pod \"catalog-operator-68c6474976-zpgwb\" (UID: \"fddbd3be-962c-4043-9b61-8954a0a4c796\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431540 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/555e97b5-6263-468f-ad51-4e32ca4ac5d4-srv-cert\") pod \"olm-operator-6b444d44fb-rn5cg\" (UID: \"555e97b5-6263-468f-ad51-4e32ca4ac5d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431557 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ef2f4fb-55d8-4502-be08-aa6296dc8dd7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-hx6f9\" (UID: \"0ef2f4fb-55d8-4502-be08-aa6296dc8dd7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431573 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7k5m\" (UniqueName: \"kubernetes.io/projected/975a1cda-589e-4583-a601-b2a1eba69a16-kube-api-access-t7k5m\") pod \"marketplace-operator-79b997595-z2mdp\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431591 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b27af4b9-6198-4f4e-b269-2faed33e9dad-trusted-ca\") pod \"ingress-operator-5b745b69d9-vstm9\" (UID: \"b27af4b9-6198-4f4e-b269-2faed33e9dad\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431620 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-z2mdp\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431665 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cef45b47-894e-4bfe-82ff-352085dbf93e-images\") pod \"machine-api-operator-5694c8668f-trkjr\" (UID: \"cef45b47-894e-4bfe-82ff-352085dbf93e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431697 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/555e97b5-6263-468f-ad51-4e32ca4ac5d4-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rn5cg\" (UID: \"555e97b5-6263-468f-ad51-4e32ca4ac5d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431712 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49f8c\" (UniqueName: \"kubernetes.io/projected/c7251dfa-73ce-4de2-953b-83e05ae7e3f8-kube-api-access-49f8c\") pod \"migrator-59844c95c7-rv7bj\" (UID: \"c7251dfa-73ce-4de2-953b-83e05ae7e3f8\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rv7bj" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431739 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d3de6981-1f82-494a-b652-59c9677791b5-etcd-ca\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431777 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f3fd73ed-3b8f-4cc1-893b-49e816d12386-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-97cvl\" (UID: \"f3fd73ed-3b8f-4cc1-893b-49e816d12386\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431793 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3fd73ed-3b8f-4cc1-893b-49e816d12386-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-97cvl\" (UID: \"f3fd73ed-3b8f-4cc1-893b-49e816d12386\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431823 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tjdd\" (UniqueName: \"kubernetes.io/projected/cef45b47-894e-4bfe-82ff-352085dbf93e-kube-api-access-8tjdd\") pod \"machine-api-operator-5694c8668f-trkjr\" (UID: \"cef45b47-894e-4bfe-82ff-352085dbf93e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431867 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-25rs2\" (UniqueName: \"kubernetes.io/projected/52c7a701-2169-4dbc-ba2f-62e40b8b4450-kube-api-access-25rs2\") pod \"cluster-image-registry-operator-dc59b4c8b-m45jg\" (UID: \"52c7a701-2169-4dbc-ba2f-62e40b8b4450\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.431887 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3de6981-1f82-494a-b652-59c9677791b5-config\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.432172 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/52c7a701-2169-4dbc-ba2f-62e40b8b4450-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-m45jg\" (UID: \"52c7a701-2169-4dbc-ba2f-62e40b8b4450\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.432196 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7eca320e-67e3-4f03-92ff-8a79363ca7ad-metrics-certs\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.432210 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99pln\" (UniqueName: \"kubernetes.io/projected/b27af4b9-6198-4f4e-b269-2faed33e9dad-kube-api-access-99pln\") pod \"ingress-operator-5b745b69d9-vstm9\" (UID: \"b27af4b9-6198-4f4e-b269-2faed33e9dad\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.432241 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/dedca0f9-b035-4d2d-bb74-85c016290a92-encryption-config\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.432279 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ef2f4fb-55d8-4502-be08-aa6296dc8dd7-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-hx6f9\" (UID: \"0ef2f4fb-55d8-4502-be08-aa6296dc8dd7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.432297 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/68d328a7-6916-41a1-b3df-f21a1635c000-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-f9nlf\" (UID: \"68d328a7-6916-41a1-b3df-f21a1635c000\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.432321 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-serving-cert\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.432355 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dedca0f9-b035-4d2d-bb74-85c016290a92-audit-policies\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.432372 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sq8c\" (UniqueName: \"kubernetes.io/projected/555e97b5-6263-468f-ad51-4e32ca4ac5d4-kube-api-access-7sq8c\") pod \"olm-operator-6b444d44fb-rn5cg\" (UID: \"555e97b5-6263-468f-ad51-4e32ca4ac5d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.432801 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3de6981-1f82-494a-b652-59c9677791b5-config\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.432975 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d3de6981-1f82-494a-b652-59c9677791b5-etcd-ca\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.436232 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3de6981-1f82-494a-b652-59c9677791b5-serving-cert\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.436469 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d3de6981-1f82-494a-b652-59c9677791b5-etcd-client\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.437229 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-ffr5d"] Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.437762 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-ffr5d" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.447470 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.466901 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.487157 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.507290 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.527989 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.532827 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/52c7a701-2169-4dbc-ba2f-62e40b8b4450-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-m45jg\" (UID: \"52c7a701-2169-4dbc-ba2f-62e40b8b4450\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.532854 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7eca320e-67e3-4f03-92ff-8a79363ca7ad-metrics-certs\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.532885 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99pln\" (UniqueName: \"kubernetes.io/projected/b27af4b9-6198-4f4e-b269-2faed33e9dad-kube-api-access-99pln\") pod \"ingress-operator-5b745b69d9-vstm9\" (UID: \"b27af4b9-6198-4f4e-b269-2faed33e9dad\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.532901 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/dedca0f9-b035-4d2d-bb74-85c016290a92-encryption-config\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.532916 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ef2f4fb-55d8-4502-be08-aa6296dc8dd7-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-hx6f9\" (UID: \"0ef2f4fb-55d8-4502-be08-aa6296dc8dd7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.532930 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/68d328a7-6916-41a1-b3df-f21a1635c000-samples-operator-tls\") pod 
\"cluster-samples-operator-665b6dd947-f9nlf\" (UID: \"68d328a7-6916-41a1-b3df-f21a1635c000\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.532945 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-serving-cert\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.532959 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dedca0f9-b035-4d2d-bb74-85c016290a92-audit-policies\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.532972 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sq8c\" (UniqueName: \"kubernetes.io/projected/555e97b5-6263-468f-ad51-4e32ca4ac5d4-kube-api-access-7sq8c\") pod \"olm-operator-6b444d44fb-rn5cg\" (UID: \"555e97b5-6263-468f-ad51-4e32ca4ac5d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.532990 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-z2mdp\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533009 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dedca0f9-b035-4d2d-bb74-85c016290a92-serving-cert\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533025 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj7mw\" (UniqueName: \"kubernetes.io/projected/7eca320e-67e3-4f03-92ff-8a79363ca7ad-kube-api-access-vj7mw\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533044 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52c7a701-2169-4dbc-ba2f-62e40b8b4450-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-m45jg\" (UID: \"52c7a701-2169-4dbc-ba2f-62e40b8b4450\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533060 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnz58\" (UniqueName: \"kubernetes.io/projected/0ef2f4fb-55d8-4502-be08-aa6296dc8dd7-kube-api-access-tnz58\") pod \"openshift-apiserver-operator-796bbdcf4f-hx6f9\" (UID: \"0ef2f4fb-55d8-4502-be08-aa6296dc8dd7\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533073 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b549740-1c07-490a-be84-a135fc13c554-config\") pod \"kube-apiserver-operator-766d6c64bb-67sg6\" (UID: \"4b549740-1c07-490a-be84-a135fc13c554\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533086 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fddbd3be-962c-4043-9b61-8954a0a4c796-profile-collector-cert\") pod \"catalog-operator-68c6474976-zpgwb\" (UID: \"fddbd3be-962c-4043-9b61-8954a0a4c796\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533099 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wr4f\" (UniqueName: \"kubernetes.io/projected/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-kube-api-access-7wr4f\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533135 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dedca0f9-b035-4d2d-bb74-85c016290a92-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533149 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7eca320e-67e3-4f03-92ff-8a79363ca7ad-stats-auth\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533164 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cef45b47-894e-4bfe-82ff-352085dbf93e-config\") pod \"machine-api-operator-5694c8668f-trkjr\" (UID: \"cef45b47-894e-4bfe-82ff-352085dbf93e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533178 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrdbd\" (UniqueName: \"kubernetes.io/projected/73126b70-0c81-4c61-a395-8aa33f0128a8-kube-api-access-zrdbd\") pod \"dns-operator-744455d44c-8ddts\" (UID: \"73126b70-0c81-4c61-a395-8aa33f0128a8\") " pod="openshift-dns-operator/dns-operator-744455d44c-8ddts" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533192 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7eca320e-67e3-4f03-92ff-8a79363ca7ad-service-ca-bundle\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533210 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/b27af4b9-6198-4f4e-b269-2faed33e9dad-bound-sa-token\") pod \"ingress-operator-5b745b69d9-vstm9\" (UID: \"b27af4b9-6198-4f4e-b269-2faed33e9dad\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533223 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/73126b70-0c81-4c61-a395-8aa33f0128a8-metrics-tls\") pod \"dns-operator-744455d44c-8ddts\" (UID: \"73126b70-0c81-4c61-a395-8aa33f0128a8\") " pod="openshift-dns-operator/dns-operator-744455d44c-8ddts" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533236 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpwl9\" (UniqueName: \"kubernetes.io/projected/f3fd73ed-3b8f-4cc1-893b-49e816d12386-kube-api-access-vpwl9\") pod \"kube-storage-version-migrator-operator-b67b599dd-97cvl\" (UID: \"f3fd73ed-3b8f-4cc1-893b-49e816d12386\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533248 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dedca0f9-b035-4d2d-bb74-85c016290a92-audit-dir\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533262 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-config\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533277 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b549740-1c07-490a-be84-a135fc13c554-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-67sg6\" (UID: \"4b549740-1c07-490a-be84-a135fc13c554\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533289 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e-trusted-ca\") pod \"console-operator-58897d9998-zhk4k\" (UID: \"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e\") " pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533303 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b27af4b9-6198-4f4e-b269-2faed33e9dad-metrics-tls\") pod \"ingress-operator-5b745b69d9-vstm9\" (UID: \"b27af4b9-6198-4f4e-b269-2faed33e9dad\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533314 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e-config\") pod \"console-operator-58897d9998-zhk4k\" (UID: \"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e\") " pod="openshift-console-operator/console-operator-58897d9998-zhk4k" 
Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533327 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj6jv\" (UniqueName: \"kubernetes.io/projected/dedca0f9-b035-4d2d-bb74-85c016290a92-kube-api-access-vj6jv\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533340 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkxzp\" (UniqueName: \"kubernetes.io/projected/9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e-kube-api-access-qkxzp\") pod \"console-operator-58897d9998-zhk4k\" (UID: \"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e\") " pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533352 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7eca320e-67e3-4f03-92ff-8a79363ca7ad-default-certificate\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533366 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bchq\" (UniqueName: \"kubernetes.io/projected/68d328a7-6916-41a1-b3df-f21a1635c000-kube-api-access-8bchq\") pod \"cluster-samples-operator-665b6dd947-f9nlf\" (UID: \"68d328a7-6916-41a1-b3df-f21a1635c000\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533378 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dedca0f9-b035-4d2d-bb74-85c016290a92-etcd-client\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533390 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/dedca0f9-b035-4d2d-bb74-85c016290a92-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533403 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-service-ca\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533415 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-oauth-config\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533428 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-trusted-ca-bundle\") pod 
\"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533440 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-oauth-serving-cert\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533454 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/cef45b47-894e-4bfe-82ff-352085dbf93e-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-trkjr\" (UID: \"cef45b47-894e-4bfe-82ff-352085dbf93e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533468 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e-serving-cert\") pod \"console-operator-58897d9998-zhk4k\" (UID: \"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e\") " pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533482 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kfq6\" (UniqueName: \"kubernetes.io/projected/fddbd3be-962c-4043-9b61-8954a0a4c796-kube-api-access-9kfq6\") pod \"catalog-operator-68c6474976-zpgwb\" (UID: \"fddbd3be-962c-4043-9b61-8954a0a4c796\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533494 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b549740-1c07-490a-be84-a135fc13c554-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-67sg6\" (UID: \"4b549740-1c07-490a-be84-a135fc13c554\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533507 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52c7a701-2169-4dbc-ba2f-62e40b8b4450-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-m45jg\" (UID: \"52c7a701-2169-4dbc-ba2f-62e40b8b4450\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533519 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fddbd3be-962c-4043-9b61-8954a0a4c796-srv-cert\") pod \"catalog-operator-68c6474976-zpgwb\" (UID: \"fddbd3be-962c-4043-9b61-8954a0a4c796\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533531 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/555e97b5-6263-468f-ad51-4e32ca4ac5d4-srv-cert\") pod \"olm-operator-6b444d44fb-rn5cg\" (UID: \"555e97b5-6263-468f-ad51-4e32ca4ac5d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 
12:23:04.533543 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ef2f4fb-55d8-4502-be08-aa6296dc8dd7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-hx6f9\" (UID: \"0ef2f4fb-55d8-4502-be08-aa6296dc8dd7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533557 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7k5m\" (UniqueName: \"kubernetes.io/projected/975a1cda-589e-4583-a601-b2a1eba69a16-kube-api-access-t7k5m\") pod \"marketplace-operator-79b997595-z2mdp\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533570 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b27af4b9-6198-4f4e-b269-2faed33e9dad-trusted-ca\") pod \"ingress-operator-5b745b69d9-vstm9\" (UID: \"b27af4b9-6198-4f4e-b269-2faed33e9dad\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533583 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-z2mdp\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533596 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cef45b47-894e-4bfe-82ff-352085dbf93e-images\") pod \"machine-api-operator-5694c8668f-trkjr\" (UID: \"cef45b47-894e-4bfe-82ff-352085dbf93e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533609 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/555e97b5-6263-468f-ad51-4e32ca4ac5d4-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rn5cg\" (UID: \"555e97b5-6263-468f-ad51-4e32ca4ac5d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533623 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49f8c\" (UniqueName: \"kubernetes.io/projected/c7251dfa-73ce-4de2-953b-83e05ae7e3f8-kube-api-access-49f8c\") pod \"migrator-59844c95c7-rv7bj\" (UID: \"c7251dfa-73ce-4de2-953b-83e05ae7e3f8\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rv7bj" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533636 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f3fd73ed-3b8f-4cc1-893b-49e816d12386-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-97cvl\" (UID: \"f3fd73ed-3b8f-4cc1-893b-49e816d12386\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533717 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/f3fd73ed-3b8f-4cc1-893b-49e816d12386-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-97cvl\" (UID: \"f3fd73ed-3b8f-4cc1-893b-49e816d12386\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533733 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tjdd\" (UniqueName: \"kubernetes.io/projected/cef45b47-894e-4bfe-82ff-352085dbf93e-kube-api-access-8tjdd\") pod \"machine-api-operator-5694c8668f-trkjr\" (UID: \"cef45b47-894e-4bfe-82ff-352085dbf93e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.533750 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25rs2\" (UniqueName: \"kubernetes.io/projected/52c7a701-2169-4dbc-ba2f-62e40b8b4450-kube-api-access-25rs2\") pod \"cluster-image-registry-operator-dc59b4c8b-m45jg\" (UID: \"52c7a701-2169-4dbc-ba2f-62e40b8b4450\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.534473 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dedca0f9-b035-4d2d-bb74-85c016290a92-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.534522 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e-config\") pod \"console-operator-58897d9998-zhk4k\" (UID: \"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e\") " pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.534602 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dedca0f9-b035-4d2d-bb74-85c016290a92-audit-policies\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.535285 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cef45b47-894e-4bfe-82ff-352085dbf93e-images\") pod \"machine-api-operator-5694c8668f-trkjr\" (UID: \"cef45b47-894e-4bfe-82ff-352085dbf93e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.535315 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ef2f4fb-55d8-4502-be08-aa6296dc8dd7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-hx6f9\" (UID: \"0ef2f4fb-55d8-4502-be08-aa6296dc8dd7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.535329 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cef45b47-894e-4bfe-82ff-352085dbf93e-config\") pod \"machine-api-operator-5694c8668f-trkjr\" (UID: \"cef45b47-894e-4bfe-82ff-352085dbf93e\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.535399 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52c7a701-2169-4dbc-ba2f-62e40b8b4450-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-m45jg\" (UID: \"52c7a701-2169-4dbc-ba2f-62e40b8b4450\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.535415 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3fd73ed-3b8f-4cc1-893b-49e816d12386-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-97cvl\" (UID: \"f3fd73ed-3b8f-4cc1-893b-49e816d12386\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.535637 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dedca0f9-b035-4d2d-bb74-85c016290a92-audit-dir\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.535855 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e-trusted-ca\") pod \"console-operator-58897d9998-zhk4k\" (UID: \"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e\") " pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.535884 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/dedca0f9-b035-4d2d-bb74-85c016290a92-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.535944 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-service-ca\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.536115 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-config\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.536237 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dedca0f9-b035-4d2d-bb74-85c016290a92-etcd-client\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.536458 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-trusted-ca-bundle\") pod \"console-f9d7485db-z6qrp\" (UID: 
\"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.537313 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f3fd73ed-3b8f-4cc1-893b-49e816d12386-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-97cvl\" (UID: \"f3fd73ed-3b8f-4cc1-893b-49e816d12386\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.537446 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/dedca0f9-b035-4d2d-bb74-85c016290a92-encryption-config\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.537713 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-oauth-serving-cert\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.537950 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/68d328a7-6916-41a1-b3df-f21a1635c000-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-f9nlf\" (UID: \"68d328a7-6916-41a1-b3df-f21a1635c000\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.537989 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dedca0f9-b035-4d2d-bb74-85c016290a92-serving-cert\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.538293 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-serving-cert\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.538668 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/cef45b47-894e-4bfe-82ff-352085dbf93e-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-trkjr\" (UID: \"cef45b47-894e-4bfe-82ff-352085dbf93e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.538791 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-oauth-config\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.539080 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/0ef2f4fb-55d8-4502-be08-aa6296dc8dd7-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-hx6f9\" (UID: \"0ef2f4fb-55d8-4502-be08-aa6296dc8dd7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.539165 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/52c7a701-2169-4dbc-ba2f-62e40b8b4450-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-m45jg\" (UID: \"52c7a701-2169-4dbc-ba2f-62e40b8b4450\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.547223 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.553782 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e-serving-cert\") pod \"console-operator-58897d9998-zhk4k\" (UID: \"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e\") " pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.566831 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.587599 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.607310 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.627219 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.647230 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.667289 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.687157 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.697842 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b549740-1c07-490a-be84-a135fc13c554-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-67sg6\" (UID: \"4b549740-1c07-490a-be84-a135fc13c554\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.707766 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.715108 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b549740-1c07-490a-be84-a135fc13c554-config\") pod 
\"kube-apiserver-operator-766d6c64bb-67sg6\" (UID: \"4b549740-1c07-490a-be84-a135fc13c554\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.727472 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.747397 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.767414 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.787428 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.807435 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.817738 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/73126b70-0c81-4c61-a395-8aa33f0128a8-metrics-tls\") pod \"dns-operator-744455d44c-8ddts\" (UID: \"73126b70-0c81-4c61-a395-8aa33f0128a8\") " pod="openshift-dns-operator/dns-operator-744455d44c-8ddts" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.827621 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.847172 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.867138 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.886826 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.907533 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.927779 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.947313 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 12:23:04 crc kubenswrapper[4849]: I1203 12:23:04.966835 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.007000 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.026790 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.047392 4849 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.067547 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.087463 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.098010 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fddbd3be-962c-4043-9b61-8954a0a4c796-srv-cert\") pod \"catalog-operator-68c6474976-zpgwb\" (UID: \"fddbd3be-962c-4043-9b61-8954a0a4c796\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.107075 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.116677 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fddbd3be-962c-4043-9b61-8954a0a4c796-profile-collector-cert\") pod \"catalog-operator-68c6474976-zpgwb\" (UID: \"fddbd3be-962c-4043-9b61-8954a0a4c796\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.118732 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/555e97b5-6263-468f-ad51-4e32ca4ac5d4-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rn5cg\" (UID: \"555e97b5-6263-468f-ad51-4e32ca4ac5d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.127542 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.147957 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.166771 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.187513 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.207849 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.217510 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b27af4b9-6198-4f4e-b269-2faed33e9dad-metrics-tls\") pod \"ingress-operator-5b745b69d9-vstm9\" (UID: \"b27af4b9-6198-4f4e-b269-2faed33e9dad\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.227326 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 
12:23:05.255139 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.258052 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b27af4b9-6198-4f4e-b269-2faed33e9dad-trusted-ca\") pod \"ingress-operator-5b745b69d9-vstm9\" (UID: \"b27af4b9-6198-4f4e-b269-2faed33e9dad\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.267882 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.287540 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.307346 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.326095 4849 request.go:700] Waited for 1.017493425s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/secrets?fieldSelector=metadata.name%3Drouter-certs-default&limit=500&resourceVersion=0 Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.327074 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.337336 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7eca320e-67e3-4f03-92ff-8a79363ca7ad-default-certificate\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.347339 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.357675 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7eca320e-67e3-4f03-92ff-8a79363ca7ad-stats-auth\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.367754 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.378792 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7eca320e-67e3-4f03-92ff-8a79363ca7ad-metrics-certs\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.387781 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.396406 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/7eca320e-67e3-4f03-92ff-8a79363ca7ad-service-ca-bundle\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.407276 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.427657 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.447295 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.466826 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.487207 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.507145 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.527931 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: E1203 12:23:05.534829 4849 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition Dec 03 12:23:05 crc kubenswrapper[4849]: E1203 12:23:05.534880 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-trusted-ca podName:975a1cda-589e-4583-a601-b2a1eba69a16 nodeName:}" failed. No retries permitted until 2025-12-03 12:23:06.034867414 +0000 UTC m=+132.496715197 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-trusted-ca") pod "marketplace-operator-79b997595-z2mdp" (UID: "975a1cda-589e-4583-a601-b2a1eba69a16") : failed to sync configmap cache: timed out waiting for the condition Dec 03 12:23:05 crc kubenswrapper[4849]: E1203 12:23:05.535978 4849 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/olm-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 03 12:23:05 crc kubenswrapper[4849]: E1203 12:23:05.536003 4849 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition Dec 03 12:23:05 crc kubenswrapper[4849]: E1203 12:23:05.536042 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/555e97b5-6263-468f-ad51-4e32ca4ac5d4-srv-cert podName:555e97b5-6263-468f-ad51-4e32ca4ac5d4 nodeName:}" failed. No retries permitted until 2025-12-03 12:23:06.036025983 +0000 UTC m=+132.497873766 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/555e97b5-6263-468f-ad51-4e32ca4ac5d4-srv-cert") pod "olm-operator-6b444d44fb-rn5cg" (UID: "555e97b5-6263-468f-ad51-4e32ca4ac5d4") : failed to sync secret cache: timed out waiting for the condition Dec 03 12:23:05 crc kubenswrapper[4849]: E1203 12:23:05.536057 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-operator-metrics podName:975a1cda-589e-4583-a601-b2a1eba69a16 nodeName:}" failed. No retries permitted until 2025-12-03 12:23:06.036050981 +0000 UTC m=+132.497898763 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-operator-metrics") pod "marketplace-operator-79b997595-z2mdp" (UID: "975a1cda-589e-4583-a601-b2a1eba69a16") : failed to sync secret cache: timed out waiting for the condition Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.547736 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.566987 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.586904 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.607180 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.627860 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.647742 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.667209 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.687390 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.707780 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.727413 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.752056 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.767457 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.798672 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tqt6\" (UniqueName: \"kubernetes.io/projected/27bd0071-f571-4860-b882-360fbb4fd181-kube-api-access-5tqt6\") pod 
\"controller-manager-879f6c89f-xk97z\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.827278 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.847229 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.856321 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.856335 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.856321 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.856422 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.867423 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.887776 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.907100 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.927416 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.947301 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.967968 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 12:23:05 crc kubenswrapper[4849]: I1203 12:23:05.987233 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.007522 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.027902 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.047691 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.048022 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/555e97b5-6263-468f-ad51-4e32ca4ac5d4-srv-cert\") pod \"olm-operator-6b444d44fb-rn5cg\" (UID: 
\"555e97b5-6263-468f-ad51-4e32ca4ac5d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.048049 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-z2mdp\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.048135 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-z2mdp\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.049465 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-z2mdp\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.050319 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.050631 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/555e97b5-6263-468f-ad51-4e32ca4ac5d4-srv-cert\") pod \"olm-operator-6b444d44fb-rn5cg\" (UID: \"555e97b5-6263-468f-ad51-4e32ca4ac5d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.051037 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-z2mdp\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.067305 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.087491 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.108069 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.128171 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.147580 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.160622 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xk97z"] Dec 03 12:23:06 crc 
kubenswrapper[4849]: W1203 12:23:06.165553 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27bd0071_f571_4860_b882_360fbb4fd181.slice/crio-da40ae95129c9495501901d286daea7573995e0cf73563d0802f1ff74136bcb6 WatchSource:0}: Error finding container da40ae95129c9495501901d286daea7573995e0cf73563d0802f1ff74136bcb6: Status 404 returned error can't find the container with id da40ae95129c9495501901d286daea7573995e0cf73563d0802f1ff74136bcb6 Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.166965 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.187668 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.207416 4849 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.227025 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.235523 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" event={"ID":"27bd0071-f571-4860-b882-360fbb4fd181","Type":"ContainerStarted","Data":"378ad15e10f1ce7f31b60022dcca7b94a5e22ae464705ba8570623767c258c52"} Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.235558 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" event={"ID":"27bd0071-f571-4860-b882-360fbb4fd181","Type":"ContainerStarted","Data":"da40ae95129c9495501901d286daea7573995e0cf73563d0802f1ff74136bcb6"} Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.235781 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.237266 4849 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-xk97z container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.237299 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" podUID="27bd0071-f571-4860-b882-360fbb4fd181" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.259471 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smdsr\" (UniqueName: \"kubernetes.io/projected/d3de6981-1f82-494a-b652-59c9677791b5-kube-api-access-smdsr\") pod \"etcd-operator-b45778765-zlm6p\" (UID: \"d3de6981-1f82-494a-b652-59c9677791b5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.267423 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.287881 4849 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.306984 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.326501 4849 request.go:700] Waited for 1.792627866s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/serviceaccounts/cluster-image-registry-operator/token Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.338422 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25rs2\" (UniqueName: \"kubernetes.io/projected/52c7a701-2169-4dbc-ba2f-62e40b8b4450-kube-api-access-25rs2\") pod \"cluster-image-registry-operator-dc59b4c8b-m45jg\" (UID: \"52c7a701-2169-4dbc-ba2f-62e40b8b4450\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.358286 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnz58\" (UniqueName: \"kubernetes.io/projected/0ef2f4fb-55d8-4502-be08-aa6296dc8dd7-kube-api-access-tnz58\") pod \"openshift-apiserver-operator-796bbdcf4f-hx6f9\" (UID: \"0ef2f4fb-55d8-4502-be08-aa6296dc8dd7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.377976 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrdbd\" (UniqueName: \"kubernetes.io/projected/73126b70-0c81-4c61-a395-8aa33f0128a8-kube-api-access-zrdbd\") pod \"dns-operator-744455d44c-8ddts\" (UID: \"73126b70-0c81-4c61-a395-8aa33f0128a8\") " pod="openshift-dns-operator/dns-operator-744455d44c-8ddts" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.398271 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b549740-1c07-490a-be84-a135fc13c554-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-67sg6\" (UID: \"4b549740-1c07-490a-be84-a135fc13c554\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.419574 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj6jv\" (UniqueName: \"kubernetes.io/projected/dedca0f9-b035-4d2d-bb74-85c016290a92-kube-api-access-vj6jv\") pod \"apiserver-7bbb656c7d-cnzhf\" (UID: \"dedca0f9-b035-4d2d-bb74-85c016290a92\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.439186 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wr4f\" (UniqueName: \"kubernetes.io/projected/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-kube-api-access-7wr4f\") pod \"console-f9d7485db-z6qrp\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.455052 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.459572 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkxzp\" (UniqueName: \"kubernetes.io/projected/9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e-kube-api-access-qkxzp\") pod \"console-operator-58897d9998-zhk4k\" (UID: \"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e\") " pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.472435 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.478623 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bchq\" (UniqueName: \"kubernetes.io/projected/68d328a7-6916-41a1-b3df-f21a1635c000-kube-api-access-8bchq\") pod \"cluster-samples-operator-665b6dd947-f9nlf\" (UID: \"68d328a7-6916-41a1-b3df-f21a1635c000\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.487251 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.498821 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.499393 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99pln\" (UniqueName: \"kubernetes.io/projected/b27af4b9-6198-4f4e-b269-2faed33e9dad-kube-api-access-99pln\") pod \"ingress-operator-5b745b69d9-vstm9\" (UID: \"b27af4b9-6198-4f4e-b269-2faed33e9dad\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.519411 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpwl9\" (UniqueName: \"kubernetes.io/projected/f3fd73ed-3b8f-4cc1-893b-49e816d12386-kube-api-access-vpwl9\") pod \"kube-storage-version-migrator-operator-b67b599dd-97cvl\" (UID: \"f3fd73ed-3b8f-4cc1-893b-49e816d12386\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.521232 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.534820 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8ddts" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.539506 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b27af4b9-6198-4f4e-b269-2faed33e9dad-bound-sa-token\") pod \"ingress-operator-5b745b69d9-vstm9\" (UID: \"b27af4b9-6198-4f4e-b269-2faed33e9dad\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.563917 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sq8c\" (UniqueName: \"kubernetes.io/projected/555e97b5-6263-468f-ad51-4e32ca4ac5d4-kube-api-access-7sq8c\") pod \"olm-operator-6b444d44fb-rn5cg\" (UID: \"555e97b5-6263-468f-ad51-4e32ca4ac5d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.564806 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.587403 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tjdd\" (UniqueName: \"kubernetes.io/projected/cef45b47-894e-4bfe-82ff-352085dbf93e-kube-api-access-8tjdd\") pod \"machine-api-operator-5694c8668f-trkjr\" (UID: \"cef45b47-894e-4bfe-82ff-352085dbf93e\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.590830 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf"] Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.599933 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.603362 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kfq6\" (UniqueName: \"kubernetes.io/projected/fddbd3be-962c-4043-9b61-8954a0a4c796-kube-api-access-9kfq6\") pod \"catalog-operator-68c6474976-zpgwb\" (UID: \"fddbd3be-962c-4043-9b61-8954a0a4c796\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.622032 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52c7a701-2169-4dbc-ba2f-62e40b8b4450-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-m45jg\" (UID: \"52c7a701-2169-4dbc-ba2f-62e40b8b4450\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.626623 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9"] Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.640516 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj7mw\" (UniqueName: \"kubernetes.io/projected/7eca320e-67e3-4f03-92ff-8a79363ca7ad-kube-api-access-vj7mw\") pod \"router-default-5444994796-zdq4x\" (UID: \"7eca320e-67e3-4f03-92ff-8a79363ca7ad\") " pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.658250 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.665866 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-z6qrp"] Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.675138 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49f8c\" (UniqueName: \"kubernetes.io/projected/c7251dfa-73ce-4de2-953b-83e05ae7e3f8-kube-api-access-49f8c\") pod \"migrator-59844c95c7-rv7bj\" (UID: \"c7251dfa-73ce-4de2-953b-83e05ae7e3f8\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rv7bj" Dec 03 12:23:06 crc kubenswrapper[4849]: W1203 12:23:06.679182 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a6f5d1b_e29e_4cc2_a3b1_5f8121c4e152.slice/crio-46116c7eb5793645b8e07325db5f0f70f594899c6df00023f51d28ae96d284be WatchSource:0}: Error finding container 46116c7eb5793645b8e07325db5f0f70f594899c6df00023f51d28ae96d284be: Status 404 returned error can't find the container with id 46116c7eb5793645b8e07325db5f0f70f594899c6df00023f51d28ae96d284be Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.683945 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7k5m\" (UniqueName: \"kubernetes.io/projected/975a1cda-589e-4583-a601-b2a1eba69a16-kube-api-access-t7k5m\") pod \"marketplace-operator-79b997595-z2mdp\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.703973 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.711673 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zlm6p"] Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.730545 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 12:23:06 crc kubenswrapper[4849]: W1203 12:23:06.735141 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3de6981_1f82_494a_b652_59c9677791b5.slice/crio-b6d0c25d3b3231c86e9878bd46851e33e81246abe86530b1f14125a23fa63096 WatchSource:0}: Error finding container b6d0c25d3b3231c86e9878bd46851e33e81246abe86530b1f14125a23fa63096: Status 404 returned error can't find the container with id b6d0c25d3b3231c86e9878bd46851e33e81246abe86530b1f14125a23fa63096 Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.739379 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6"] Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.743144 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.747368 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757216 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23e004d1-bd4a-4ff4-a4ca-f9cca1c19580-serving-cert\") pod \"openshift-config-operator-7777fb866f-v9bf5\" (UID: \"23e004d1-bd4a-4ff4-a4ca-f9cca1c19580\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757247 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-encryption-config\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757269 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/72044bfe-dd36-43dd-823f-1dfc7b07d8ac-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lvg2x\" (UID: \"72044bfe-dd36-43dd-823f-1dfc7b07d8ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757287 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl888\" (UniqueName: \"kubernetes.io/projected/c4daca20-bfbb-4929-8a37-5f75500b1afd-kube-api-access-cl888\") pod \"package-server-manager-789f6589d5-6q7bv\" (UID: \"c4daca20-bfbb-4929-8a37-5f75500b1afd\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757303 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85ee3144-92c7-48ec-855f-a3d01fc6d89a-config\") pod \"service-ca-operator-777779d784-xpg5z\" (UID: \"85ee3144-92c7-48ec-855f-a3d01fc6d89a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757317 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea7b6a9d-c590-4658-af57-2e0d68f31a4d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt4qk\" (UID: \"ea7b6a9d-c590-4658-af57-2e0d68f31a4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757333 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-config\") pod \"route-controller-manager-6576b87f9c-v6w56\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757347 4849 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c4daca20-bfbb-4929-8a37-5f75500b1afd-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6q7bv\" (UID: \"c4daca20-bfbb-4929-8a37-5f75500b1afd\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757362 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9jtn\" (UniqueName: \"kubernetes.io/projected/d2b308a7-868d-42d7-8eab-5b525b108855-kube-api-access-k9jtn\") pod \"machine-approver-56656f9798-pnncr\" (UID: \"d2b308a7-868d-42d7-8eab-5b525b108855\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757375 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-etcd-client\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757391 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ls6fj\" (UID: \"5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757418 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757433 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zxz5\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-kube-api-access-4zxz5\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757448 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnjwk\" (UniqueName: \"kubernetes.io/projected/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-kube-api-access-jnjwk\") pod \"route-controller-manager-6576b87f9c-v6w56\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757462 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zx8q\" (UniqueName: \"kubernetes.io/projected/ad1ad7d0-2483-4332-8d18-79fc262fe94b-kube-api-access-7zx8q\") pod \"packageserver-d55dfcdfc-cgdl8\" (UID: \"ad1ad7d0-2483-4332-8d18-79fc262fe94b\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757477 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ad1ad7d0-2483-4332-8d18-79fc262fe94b-tmpfs\") pod \"packageserver-d55dfcdfc-cgdl8\" (UID: \"ad1ad7d0-2483-4332-8d18-79fc262fe94b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757491 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/13bacd23-8657-41bd-ad96-26e1ec27bd42-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-657zm\" (UID: \"13bacd23-8657-41bd-ad96-26e1ec27bd42\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-657zm" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757505 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72044bfe-dd36-43dd-823f-1dfc7b07d8ac-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lvg2x\" (UID: \"72044bfe-dd36-43dd-823f-1dfc7b07d8ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757519 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757534 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757545 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-bound-sa-token\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757561 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfq24\" (UniqueName: \"kubernetes.io/projected/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-kube-api-access-hfq24\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757584 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85ee3144-92c7-48ec-855f-a3d01fc6d89a-serving-cert\") pod \"service-ca-operator-777779d784-xpg5z\" (UID: \"85ee3144-92c7-48ec-855f-a3d01fc6d89a\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757599 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bbl9\" (UniqueName: \"kubernetes.io/projected/ea7b6a9d-c590-4658-af57-2e0d68f31a4d-kube-api-access-9bbl9\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt4qk\" (UID: \"ea7b6a9d-c590-4658-af57-2e0d68f31a4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757615 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-audit-dir\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757632 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9ceeff5f-df0f-482d-b82e-2be694db679e-images\") pod \"machine-config-operator-74547568cd-nvzf6\" (UID: \"9ceeff5f-df0f-482d-b82e-2be694db679e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757767 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-serving-cert\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757787 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-audit\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757802 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9x4b\" (UniqueName: \"kubernetes.io/projected/13bacd23-8657-41bd-ad96-26e1ec27bd42-kube-api-access-k9x4b\") pod \"multus-admission-controller-857f4d67dd-657zm\" (UID: \"13bacd23-8657-41bd-ad96-26e1ec27bd42\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-657zm" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757816 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-image-import-ca\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757833 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-client-ca\") pod \"route-controller-manager-6576b87f9c-v6w56\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757846 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8478b9f3-643c-490d-8f8b-663e19230dc2-audit-dir\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757864 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzb6w\" (UniqueName: \"kubernetes.io/projected/23e004d1-bd4a-4ff4-a4ca-f9cca1c19580-kube-api-access-pzb6w\") pod \"openshift-config-operator-7777fb866f-v9bf5\" (UID: \"23e004d1-bd4a-4ff4-a4ca-f9cca1c19580\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757880 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg7zk\" (UniqueName: \"kubernetes.io/projected/f6ce1f54-c500-4887-b9c7-cd89893a62ee-kube-api-access-fg7zk\") pod \"downloads-7954f5f757-jcr69\" (UID: \"f6ce1f54-c500-4887-b9c7-cd89893a62ee\") " pod="openshift-console/downloads-7954f5f757-jcr69" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757894 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2v2pv\" (UniqueName: \"kubernetes.io/projected/85ee3144-92c7-48ec-855f-a3d01fc6d89a-kube-api-access-2v2pv\") pod \"service-ca-operator-777779d784-xpg5z\" (UID: \"85ee3144-92c7-48ec-855f-a3d01fc6d89a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757907 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72044bfe-dd36-43dd-823f-1dfc7b07d8ac-config\") pod \"kube-controller-manager-operator-78b949d7b-lvg2x\" (UID: \"72044bfe-dd36-43dd-823f-1dfc7b07d8ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757921 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757940 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757961 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-node-pullsecrets\") pod 
\"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757977 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.757994 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ls6fj\" (UID: \"5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758008 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldtj9\" (UniqueName: \"kubernetes.io/projected/8478b9f3-643c-490d-8f8b-663e19230dc2-kube-api-access-ldtj9\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758026 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ls6fj\" (UID: \"5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758039 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-trusted-ca\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758055 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/23e004d1-bd4a-4ff4-a4ca-f9cca1c19580-available-featuregates\") pod \"openshift-config-operator-7777fb866f-v9bf5\" (UID: \"23e004d1-bd4a-4ff4-a4ca-f9cca1c19580\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758068 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-service-ca-bundle\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758096 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758117 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdv9d\" (UniqueName: \"kubernetes.io/projected/9ceeff5f-df0f-482d-b82e-2be694db679e-kube-api-access-tdv9d\") pod \"machine-config-operator-74547568cd-nvzf6\" (UID: \"9ceeff5f-df0f-482d-b82e-2be694db679e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758154 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea7b6a9d-c590-4658-af57-2e0d68f31a4d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt4qk\" (UID: \"ea7b6a9d-c590-4658-af57-2e0d68f31a4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758177 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ad1ad7d0-2483-4332-8d18-79fc262fe94b-apiservice-cert\") pod \"packageserver-d55dfcdfc-cgdl8\" (UID: \"ad1ad7d0-2483-4332-8d18-79fc262fe94b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758191 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-registry-certificates\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758204 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9ceeff5f-df0f-482d-b82e-2be694db679e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nvzf6\" (UID: \"9ceeff5f-df0f-482d-b82e-2be694db679e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758219 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758236 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758249 4849 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9ceeff5f-df0f-482d-b82e-2be694db679e-proxy-tls\") pod \"machine-config-operator-74547568cd-nvzf6\" (UID: \"9ceeff5f-df0f-482d-b82e-2be694db679e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758263 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-trusted-ca-bundle\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758276 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brkj6\" (UniqueName: \"kubernetes.io/projected/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-kube-api-access-brkj6\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758290 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-audit-policies\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758303 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d2b308a7-868d-42d7-8eab-5b525b108855-machine-approver-tls\") pod \"machine-approver-56656f9798-pnncr\" (UID: \"d2b308a7-868d-42d7-8eab-5b525b108855\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758317 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758335 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758352 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2b308a7-868d-42d7-8eab-5b525b108855-config\") pod \"machine-approver-56656f9798-pnncr\" (UID: \"d2b308a7-868d-42d7-8eab-5b525b108855\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758368 4849 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-config\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758382 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-registry-tls\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758398 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-etcd-serving-ca\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758416 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-serving-cert\") pod \"route-controller-manager-6576b87f9c-v6w56\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758429 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758446 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758469 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758482 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-serving-cert\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758497 4849 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ad1ad7d0-2483-4332-8d18-79fc262fe94b-webhook-cert\") pod \"packageserver-d55dfcdfc-cgdl8\" (UID: \"ad1ad7d0-2483-4332-8d18-79fc262fe94b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758509 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-config\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758523 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d2b308a7-868d-42d7-8eab-5b525b108855-auth-proxy-config\") pod \"machine-approver-56656f9798-pnncr\" (UID: \"d2b308a7-868d-42d7-8eab-5b525b108855\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.758537 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: E1203 12:23:06.758999 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:07.258989866 +0000 UTC m=+133.720837650 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.768625 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.772884 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8ddts"] Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.788360 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.797786 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.803651 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9"] Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.804021 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.805138 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg"] Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.807635 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.828816 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 12:23:06 crc kubenswrapper[4849]: W1203 12:23:06.832876 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod555e97b5_6263_468f_ad51_4e32ca4ac5d4.slice/crio-b9bad241f6950ce9f414afe2257a7265047dfa0fa9c6ff6a559479f3f5ee0925 WatchSource:0}: Error finding container b9bad241f6950ce9f414afe2257a7265047dfa0fa9c6ff6a559479f3f5ee0925: Status 404 returned error can't find the container with id b9bad241f6950ce9f414afe2257a7265047dfa0fa9c6ff6a559479f3f5ee0925 Dec 03 12:23:06 crc kubenswrapper[4849]: W1203 12:23:06.840807 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb27af4b9_6198_4f4e_b269_2faed33e9dad.slice/crio-fa3495f39ba9d4ab5ff21badcde24d8b2116abefdd370ed6aeabe7723027a1f4 WatchSource:0}: Error finding container fa3495f39ba9d4ab5ff21badcde24d8b2116abefdd370ed6aeabe7723027a1f4: Status 404 returned error can't find the container with id fa3495f39ba9d4ab5ff21badcde24d8b2116abefdd370ed6aeabe7723027a1f4 Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.859079 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.859514 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" Dec 03 12:23:06 crc kubenswrapper[4849]: E1203 12:23:06.859678 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:07.359659129 +0000 UTC m=+133.821506911 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860541 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-csi-data-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860605 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85ee3144-92c7-48ec-855f-a3d01fc6d89a-serving-cert\") pod \"service-ca-operator-777779d784-xpg5z\" (UID: \"85ee3144-92c7-48ec-855f-a3d01fc6d89a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860623 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfq24\" (UniqueName: \"kubernetes.io/projected/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-kube-api-access-hfq24\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860667 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7kzw\" (UniqueName: \"kubernetes.io/projected/40fb2e03-cf2f-4504-9168-9e487f5799ea-kube-api-access-h7kzw\") pod \"collect-profiles-29412735-hgspx\" (UID: \"40fb2e03-cf2f-4504-9168-9e487f5799ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860697 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bbl9\" (UniqueName: \"kubernetes.io/projected/ea7b6a9d-c590-4658-af57-2e0d68f31a4d-kube-api-access-9bbl9\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt4qk\" (UID: \"ea7b6a9d-c590-4658-af57-2e0d68f31a4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860760 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-audit-dir\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860775 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9ceeff5f-df0f-482d-b82e-2be694db679e-images\") pod \"machine-config-operator-74547568cd-nvzf6\" (UID: \"9ceeff5f-df0f-482d-b82e-2be694db679e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860788 4849 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-serving-cert\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860825 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-audit\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860849 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9x4b\" (UniqueName: \"kubernetes.io/projected/13bacd23-8657-41bd-ad96-26e1ec27bd42-kube-api-access-k9x4b\") pod \"multus-admission-controller-857f4d67dd-657zm\" (UID: \"13bacd23-8657-41bd-ad96-26e1ec27bd42\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-657zm" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860864 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-image-import-ca\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860926 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6459e607-3af9-4658-8425-7d20afaa71da-proxy-tls\") pod \"machine-config-controller-84d6567774-qxlsn\" (UID: \"6459e607-3af9-4658-8425-7d20afaa71da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860941 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-registration-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.860999 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-client-ca\") pod \"route-controller-manager-6576b87f9c-v6w56\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861013 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8478b9f3-643c-490d-8f8b-663e19230dc2-audit-dir\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861051 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8d71eb0e-8baf-4813-b29e-d8b0505f3ce4-config-volume\") pod \"dns-default-w7xvk\" (UID: 
\"8d71eb0e-8baf-4813-b29e-d8b0505f3ce4\") " pod="openshift-dns/dns-default-w7xvk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861070 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg7zk\" (UniqueName: \"kubernetes.io/projected/f6ce1f54-c500-4887-b9c7-cd89893a62ee-kube-api-access-fg7zk\") pod \"downloads-7954f5f757-jcr69\" (UID: \"f6ce1f54-c500-4887-b9c7-cd89893a62ee\") " pod="openshift-console/downloads-7954f5f757-jcr69" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861084 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzb6w\" (UniqueName: \"kubernetes.io/projected/23e004d1-bd4a-4ff4-a4ca-f9cca1c19580-kube-api-access-pzb6w\") pod \"openshift-config-operator-7777fb866f-v9bf5\" (UID: \"23e004d1-bd4a-4ff4-a4ca-f9cca1c19580\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861140 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2v2pv\" (UniqueName: \"kubernetes.io/projected/85ee3144-92c7-48ec-855f-a3d01fc6d89a-kube-api-access-2v2pv\") pod \"service-ca-operator-777779d784-xpg5z\" (UID: \"85ee3144-92c7-48ec-855f-a3d01fc6d89a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861158 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72044bfe-dd36-43dd-823f-1dfc7b07d8ac-config\") pod \"kube-controller-manager-operator-78b949d7b-lvg2x\" (UID: \"72044bfe-dd36-43dd-823f-1dfc7b07d8ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861171 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861213 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861236 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-socket-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861322 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-node-pullsecrets\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 
crc kubenswrapper[4849]: I1203 12:23:06.861366 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/40fb2e03-cf2f-4504-9168-9e487f5799ea-secret-volume\") pod \"collect-profiles-29412735-hgspx\" (UID: \"40fb2e03-cf2f-4504-9168-9e487f5799ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861400 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ls6fj\" (UID: \"5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861415 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861431 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ls6fj\" (UID: \"5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861444 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldtj9\" (UniqueName: \"kubernetes.io/projected/8478b9f3-643c-490d-8f8b-663e19230dc2-kube-api-access-ldtj9\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861475 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-trusted-ca\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861500 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/23e004d1-bd4a-4ff4-a4ca-f9cca1c19580-available-featuregates\") pod \"openshift-config-operator-7777fb866f-v9bf5\" (UID: \"23e004d1-bd4a-4ff4-a4ca-f9cca1c19580\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861514 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-service-ca-bundle\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861548 4849 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861578 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdv9d\" (UniqueName: \"kubernetes.io/projected/9ceeff5f-df0f-482d-b82e-2be694db679e-kube-api-access-tdv9d\") pod \"machine-config-operator-74547568cd-nvzf6\" (UID: \"9ceeff5f-df0f-482d-b82e-2be694db679e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861617 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hv85\" (UniqueName: \"kubernetes.io/projected/e17f032e-e01e-4a6f-9f09-ccde7159a801-kube-api-access-6hv85\") pod \"ingress-canary-gct6s\" (UID: \"e17f032e-e01e-4a6f-9f09-ccde7159a801\") " pod="openshift-ingress-canary/ingress-canary-gct6s" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861634 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-plugins-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861756 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea7b6a9d-c590-4658-af57-2e0d68f31a4d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt4qk\" (UID: \"ea7b6a9d-c590-4658-af57-2e0d68f31a4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861785 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8bbp\" (UniqueName: \"kubernetes.io/projected/19cb4ba0-d936-4448-b004-402ec12d9bdd-kube-api-access-n8bbp\") pod \"control-plane-machine-set-operator-78cbb6b69f-m4cms\" (UID: \"19cb4ba0-d936-4448-b004-402ec12d9bdd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861811 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ad1ad7d0-2483-4332-8d18-79fc262fe94b-apiservice-cert\") pod \"packageserver-d55dfcdfc-cgdl8\" (UID: \"ad1ad7d0-2483-4332-8d18-79fc262fe94b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861825 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44j4t\" (UniqueName: \"kubernetes.io/projected/45259dc8-64c2-4684-9669-e8b743a14857-kube-api-access-44j4t\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861850 4849 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-registry-certificates\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861865 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9ceeff5f-df0f-482d-b82e-2be694db679e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nvzf6\" (UID: \"9ceeff5f-df0f-482d-b82e-2be694db679e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861888 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861903 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/19cb4ba0-d936-4448-b004-402ec12d9bdd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-m4cms\" (UID: \"19cb4ba0-d936-4448-b004-402ec12d9bdd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861919 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9ceeff5f-df0f-482d-b82e-2be694db679e-proxy-tls\") pod \"machine-config-operator-74547568cd-nvzf6\" (UID: \"9ceeff5f-df0f-482d-b82e-2be694db679e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861933 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-trusted-ca-bundle\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861949 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brkj6\" (UniqueName: \"kubernetes.io/projected/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-kube-api-access-brkj6\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861971 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-audit-policies\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.861994 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862008 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ffe44d8b-b221-4e19-a096-c49d48bc2391-certs\") pod \"machine-config-server-ffr5d\" (UID: \"ffe44d8b-b221-4e19-a096-c49d48bc2391\") " pod="openshift-machine-config-operator/machine-config-server-ffr5d" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862035 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d2b308a7-868d-42d7-8eab-5b525b108855-machine-approver-tls\") pod \"machine-approver-56656f9798-pnncr\" (UID: \"d2b308a7-868d-42d7-8eab-5b525b108855\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862049 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862072 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2b308a7-868d-42d7-8eab-5b525b108855-config\") pod \"machine-approver-56656f9798-pnncr\" (UID: \"d2b308a7-868d-42d7-8eab-5b525b108855\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862086 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862136 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-registry-tls\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862151 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-config\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862176 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-etcd-serving-ca\") pod \"apiserver-76f77b778f-5jm84\" (UID: 
\"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862234 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-serving-cert\") pod \"route-controller-manager-6576b87f9c-v6w56\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862248 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862283 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862307 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40fb2e03-cf2f-4504-9168-9e487f5799ea-config-volume\") pod \"collect-profiles-29412735-hgspx\" (UID: \"40fb2e03-cf2f-4504-9168-9e487f5799ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862333 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862347 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-serving-cert\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862372 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-config\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862386 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d2b308a7-868d-42d7-8eab-5b525b108855-auth-proxy-config\") pod \"machine-approver-56656f9798-pnncr\" (UID: \"d2b308a7-868d-42d7-8eab-5b525b108855\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862412 4849 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862427 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ad1ad7d0-2483-4332-8d18-79fc262fe94b-webhook-cert\") pod \"packageserver-d55dfcdfc-cgdl8\" (UID: \"ad1ad7d0-2483-4332-8d18-79fc262fe94b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862443 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8d71eb0e-8baf-4813-b29e-d8b0505f3ce4-metrics-tls\") pod \"dns-default-w7xvk\" (UID: \"8d71eb0e-8baf-4813-b29e-d8b0505f3ce4\") " pod="openshift-dns/dns-default-w7xvk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862468 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23e004d1-bd4a-4ff4-a4ca-f9cca1c19580-serving-cert\") pod \"openshift-config-operator-7777fb866f-v9bf5\" (UID: \"23e004d1-bd4a-4ff4-a4ca-f9cca1c19580\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862484 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-encryption-config\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862498 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-mountpoint-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862512 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlhx2\" (UniqueName: \"kubernetes.io/projected/ffe44d8b-b221-4e19-a096-c49d48bc2391-kube-api-access-nlhx2\") pod \"machine-config-server-ffr5d\" (UID: \"ffe44d8b-b221-4e19-a096-c49d48bc2391\") " pod="openshift-machine-config-operator/machine-config-server-ffr5d" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862529 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/72044bfe-dd36-43dd-823f-1dfc7b07d8ac-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lvg2x\" (UID: \"72044bfe-dd36-43dd-823f-1dfc7b07d8ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862544 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl888\" (UniqueName: 
\"kubernetes.io/projected/c4daca20-bfbb-4929-8a37-5f75500b1afd-kube-api-access-cl888\") pod \"package-server-manager-789f6589d5-6q7bv\" (UID: \"c4daca20-bfbb-4929-8a37-5f75500b1afd\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862561 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rrs5\" (UniqueName: \"kubernetes.io/projected/6b4406f5-12c8-47b9-9e38-792a0c6e53ed-kube-api-access-2rrs5\") pod \"service-ca-9c57cc56f-zjlhp\" (UID: \"6b4406f5-12c8-47b9-9e38-792a0c6e53ed\") " pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862593 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6b4406f5-12c8-47b9-9e38-792a0c6e53ed-signing-key\") pod \"service-ca-9c57cc56f-zjlhp\" (UID: \"6b4406f5-12c8-47b9-9e38-792a0c6e53ed\") " pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862611 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85ee3144-92c7-48ec-855f-a3d01fc6d89a-config\") pod \"service-ca-operator-777779d784-xpg5z\" (UID: \"85ee3144-92c7-48ec-855f-a3d01fc6d89a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862625 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-config\") pod \"route-controller-manager-6576b87f9c-v6w56\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862663 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c4daca20-bfbb-4929-8a37-5f75500b1afd-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6q7bv\" (UID: \"c4daca20-bfbb-4929-8a37-5f75500b1afd\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862680 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9jtn\" (UniqueName: \"kubernetes.io/projected/d2b308a7-868d-42d7-8eab-5b525b108855-kube-api-access-k9jtn\") pod \"machine-approver-56656f9798-pnncr\" (UID: \"d2b308a7-868d-42d7-8eab-5b525b108855\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862695 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea7b6a9d-c590-4658-af57-2e0d68f31a4d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt4qk\" (UID: \"ea7b6a9d-c590-4658-af57-2e0d68f31a4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862711 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: 
\"kubernetes.io/configmap/6b4406f5-12c8-47b9-9e38-792a0c6e53ed-signing-cabundle\") pod \"service-ca-9c57cc56f-zjlhp\" (UID: \"6b4406f5-12c8-47b9-9e38-792a0c6e53ed\") " pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862735 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-etcd-client\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862748 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e17f032e-e01e-4a6f-9f09-ccde7159a801-cert\") pod \"ingress-canary-gct6s\" (UID: \"e17f032e-e01e-4a6f-9f09-ccde7159a801\") " pod="openshift-ingress-canary/ingress-canary-gct6s" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862789 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ls6fj\" (UID: \"5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862821 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862838 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zxz5\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-kube-api-access-4zxz5\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862854 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6459e607-3af9-4658-8425-7d20afaa71da-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-qxlsn\" (UID: \"6459e607-3af9-4658-8425-7d20afaa71da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862887 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnjwk\" (UniqueName: \"kubernetes.io/projected/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-kube-api-access-jnjwk\") pod \"route-controller-manager-6576b87f9c-v6w56\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862903 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zx8q\" (UniqueName: \"kubernetes.io/projected/ad1ad7d0-2483-4332-8d18-79fc262fe94b-kube-api-access-7zx8q\") pod 
\"packageserver-d55dfcdfc-cgdl8\" (UID: \"ad1ad7d0-2483-4332-8d18-79fc262fe94b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862935 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcfg7\" (UniqueName: \"kubernetes.io/projected/8d71eb0e-8baf-4813-b29e-d8b0505f3ce4-kube-api-access-gcfg7\") pod \"dns-default-w7xvk\" (UID: \"8d71eb0e-8baf-4813-b29e-d8b0505f3ce4\") " pod="openshift-dns/dns-default-w7xvk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862972 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/13bacd23-8657-41bd-ad96-26e1ec27bd42-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-657zm\" (UID: \"13bacd23-8657-41bd-ad96-26e1ec27bd42\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-657zm" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862987 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72044bfe-dd36-43dd-823f-1dfc7b07d8ac-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lvg2x\" (UID: \"72044bfe-dd36-43dd-823f-1dfc7b07d8ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.863001 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ad1ad7d0-2483-4332-8d18-79fc262fe94b-tmpfs\") pod \"packageserver-d55dfcdfc-cgdl8\" (UID: \"ad1ad7d0-2483-4332-8d18-79fc262fe94b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.863020 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ffe44d8b-b221-4e19-a096-c49d48bc2391-node-bootstrap-token\") pod \"machine-config-server-ffr5d\" (UID: \"ffe44d8b-b221-4e19-a096-c49d48bc2391\") " pod="openshift-machine-config-operator/machine-config-server-ffr5d" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.863044 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.863058 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-bound-sa-token\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.863074 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.863089 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddxx9\" (UniqueName: \"kubernetes.io/projected/6459e607-3af9-4658-8425-7d20afaa71da-kube-api-access-ddxx9\") pod \"machine-config-controller-84d6567774-qxlsn\" (UID: \"6459e607-3af9-4658-8425-7d20afaa71da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.864784 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8478b9f3-643c-490d-8f8b-663e19230dc2-audit-dir\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.865274 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-client-ca\") pod \"route-controller-manager-6576b87f9c-v6w56\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.865768 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72044bfe-dd36-43dd-823f-1dfc7b07d8ac-config\") pod \"kube-controller-manager-operator-78b949d7b-lvg2x\" (UID: \"72044bfe-dd36-43dd-823f-1dfc7b07d8ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.865780 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-audit-dir\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.866324 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9ceeff5f-df0f-482d-b82e-2be694db679e-images\") pod \"machine-config-operator-74547568cd-nvzf6\" (UID: \"9ceeff5f-df0f-482d-b82e-2be694db679e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.866578 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.868578 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-audit\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.869699 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rv7bj" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.869851 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85ee3144-92c7-48ec-855f-a3d01fc6d89a-serving-cert\") pod \"service-ca-operator-777779d784-xpg5z\" (UID: \"85ee3144-92c7-48ec-855f-a3d01fc6d89a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" Dec 03 12:23:06 crc kubenswrapper[4849]: E1203 12:23:06.869998 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:07.369987105 +0000 UTC m=+133.831834889 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.870825 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-serving-cert\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.871667 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-trusted-ca-bundle\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.872203 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9ceeff5f-df0f-482d-b82e-2be694db679e-proxy-tls\") pod \"machine-config-operator-74547568cd-nvzf6\" (UID: \"9ceeff5f-df0f-482d-b82e-2be694db679e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.875043 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-config\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.875118 4849 util.go:30] "No sandbox for pod can be found. 
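
The MountVolume.MountDevice failure above ("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers") is an ordering problem rather than a broken volume: the image-registry pod's PVC is backed by that CSI driver, but at this point its node plugin (presumably the csi-hostpathplugin-cfwmz pod whose own volumes are being set up in this same window) has not yet registered with the kubelet, so the operation is parked on a retry timer. The drivers registered on a node are recorded in that node's CSINode object; the client-go sketch below reads it. The node name "crc" is taken from the log hostname, and the kubeconfig handling is an assumption for illustration.

    // Sketch: print the CSI drivers currently registered on a node by reading
    // its CSINode object. A driver whose node plugin has not yet come up, such
    // as kubevirt.io.hostpath-provisioner at this point in the log, is simply
    // absent from spec.drivers.
    package main

    import (
        "context"
        "fmt"
        "log"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Assumes a local kubeconfig; in-cluster config would work the same way.
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            log.Fatal(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            log.Fatal(err)
        }

        csiNode, err := cs.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
        if err != nil {
            log.Fatal(err)
        }
        for _, d := range csiNode.Spec.Drivers {
            fmt.Println("registered CSI driver:", d.Name)
        }
    }

Once the node plugin does register (node plugins typically announce themselves through a socket under /var/lib/kubelet/plugins_registry/), the retried MountDevice can succeed without any other change.
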
Need to start a new one" pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.876911 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.877026 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.877118 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2b308a7-868d-42d7-8eab-5b525b108855-config\") pod \"machine-approver-56656f9798-pnncr\" (UID: \"d2b308a7-868d-42d7-8eab-5b525b108855\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.877219 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-etcd-serving-ca\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.877623 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-node-pullsecrets\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.878631 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9ceeff5f-df0f-482d-b82e-2be694db679e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nvzf6\" (UID: \"9ceeff5f-df0f-482d-b82e-2be694db679e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.880672 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d2b308a7-868d-42d7-8eab-5b525b108855-auth-proxy-config\") pod \"machine-approver-56656f9798-pnncr\" (UID: \"d2b308a7-868d-42d7-8eab-5b525b108855\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.880742 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ls6fj\" (UID: \"5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.862746 4849 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-image-import-ca\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.881199 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.881774 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85ee3144-92c7-48ec-855f-a3d01fc6d89a-config\") pod \"service-ca-operator-777779d784-xpg5z\" (UID: \"85ee3144-92c7-48ec-855f-a3d01fc6d89a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.882087 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea7b6a9d-c590-4658-af57-2e0d68f31a4d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt4qk\" (UID: \"ea7b6a9d-c590-4658-af57-2e0d68f31a4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.882819 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-serving-cert\") pod \"route-controller-manager-6576b87f9c-v6w56\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.883964 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.884268 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ls6fj\" (UID: \"5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.886574 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-registry-tls\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.934433 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-service-ca-bundle\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: 
\"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.935194 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-zhk4k"] Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.935832 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-encryption-config\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.936813 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-serving-cert\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.937103 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea7b6a9d-c590-4658-af57-2e0d68f31a4d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt4qk\" (UID: \"ea7b6a9d-c590-4658-af57-2e0d68f31a4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.937554 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c4daca20-bfbb-4929-8a37-5f75500b1afd-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6q7bv\" (UID: \"c4daca20-bfbb-4929-8a37-5f75500b1afd\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.937915 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.938251 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-etcd-client\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.941906 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-trusted-ca\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.942664 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ad1ad7d0-2483-4332-8d18-79fc262fe94b-apiservice-cert\") pod \"packageserver-d55dfcdfc-cgdl8\" (UID: \"ad1ad7d0-2483-4332-8d18-79fc262fe94b\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.945030 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-config\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.945061 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-config\") pod \"route-controller-manager-6576b87f9c-v6w56\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.945399 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d2b308a7-868d-42d7-8eab-5b525b108855-machine-approver-tls\") pod \"machine-approver-56656f9798-pnncr\" (UID: \"d2b308a7-868d-42d7-8eab-5b525b108855\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.945763 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.949633 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ad1ad7d0-2483-4332-8d18-79fc262fe94b-webhook-cert\") pod \"packageserver-d55dfcdfc-cgdl8\" (UID: \"ad1ad7d0-2483-4332-8d18-79fc262fe94b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.950280 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.950408 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.950499 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ad1ad7d0-2483-4332-8d18-79fc262fe94b-tmpfs\") pod \"packageserver-d55dfcdfc-cgdl8\" (UID: \"ad1ad7d0-2483-4332-8d18-79fc262fe94b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.954176 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-audit-policies\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.955046 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.962691 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.968141 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/23e004d1-bd4a-4ff4-a4ca-f9cca1c19580-available-featuregates\") pod \"openshift-config-operator-7777fb866f-v9bf5\" (UID: \"23e004d1-bd4a-4ff4-a4ca-f9cca1c19580\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.969743 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-registry-certificates\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.970356 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.970595 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfq24\" (UniqueName: \"kubernetes.io/projected/fcb38731-cca7-4ab2-a751-7ccd17fd2a27-kube-api-access-hfq24\") pod \"apiserver-76f77b778f-5jm84\" (UID: \"fcb38731-cca7-4ab2-a751-7ccd17fd2a27\") " pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.972229 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:06 crc kubenswrapper[4849]: E1203 12:23:06.972520 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:07.47250244 +0000 UTC m=+133.934350223 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.972710 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/40fb2e03-cf2f-4504-9168-9e487f5799ea-secret-volume\") pod \"collect-profiles-29412735-hgspx\" (UID: \"40fb2e03-cf2f-4504-9168-9e487f5799ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.972803 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.972865 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hv85\" (UniqueName: \"kubernetes.io/projected/e17f032e-e01e-4a6f-9f09-ccde7159a801-kube-api-access-6hv85\") pod \"ingress-canary-gct6s\" (UID: \"e17f032e-e01e-4a6f-9f09-ccde7159a801\") " pod="openshift-ingress-canary/ingress-canary-gct6s" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.972884 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-plugins-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.972910 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8bbp\" (UniqueName: \"kubernetes.io/projected/19cb4ba0-d936-4448-b004-402ec12d9bdd-kube-api-access-n8bbp\") pod \"control-plane-machine-set-operator-78cbb6b69f-m4cms\" (UID: \"19cb4ba0-d936-4448-b004-402ec12d9bdd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.972943 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44j4t\" (UniqueName: \"kubernetes.io/projected/45259dc8-64c2-4684-9669-e8b743a14857-kube-api-access-44j4t\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.972969 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/19cb4ba0-d936-4448-b004-402ec12d9bdd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-m4cms\" (UID: \"19cb4ba0-d936-4448-b004-402ec12d9bdd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973005 4849 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ffe44d8b-b221-4e19-a096-c49d48bc2391-certs\") pod \"machine-config-server-ffr5d\" (UID: \"ffe44d8b-b221-4e19-a096-c49d48bc2391\") " pod="openshift-machine-config-operator/machine-config-server-ffr5d" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973066 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40fb2e03-cf2f-4504-9168-9e487f5799ea-config-volume\") pod \"collect-profiles-29412735-hgspx\" (UID: \"40fb2e03-cf2f-4504-9168-9e487f5799ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973099 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8d71eb0e-8baf-4813-b29e-d8b0505f3ce4-metrics-tls\") pod \"dns-default-w7xvk\" (UID: \"8d71eb0e-8baf-4813-b29e-d8b0505f3ce4\") " pod="openshift-dns/dns-default-w7xvk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973135 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-mountpoint-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973186 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlhx2\" (UniqueName: \"kubernetes.io/projected/ffe44d8b-b221-4e19-a096-c49d48bc2391-kube-api-access-nlhx2\") pod \"machine-config-server-ffr5d\" (UID: \"ffe44d8b-b221-4e19-a096-c49d48bc2391\") " pod="openshift-machine-config-operator/machine-config-server-ffr5d" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973220 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rrs5\" (UniqueName: \"kubernetes.io/projected/6b4406f5-12c8-47b9-9e38-792a0c6e53ed-kube-api-access-2rrs5\") pod \"service-ca-9c57cc56f-zjlhp\" (UID: \"6b4406f5-12c8-47b9-9e38-792a0c6e53ed\") " pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973237 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6b4406f5-12c8-47b9-9e38-792a0c6e53ed-signing-key\") pod \"service-ca-9c57cc56f-zjlhp\" (UID: \"6b4406f5-12c8-47b9-9e38-792a0c6e53ed\") " pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973263 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6b4406f5-12c8-47b9-9e38-792a0c6e53ed-signing-cabundle\") pod \"service-ca-9c57cc56f-zjlhp\" (UID: \"6b4406f5-12c8-47b9-9e38-792a0c6e53ed\") " pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973278 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e17f032e-e01e-4a6f-9f09-ccde7159a801-cert\") pod \"ingress-canary-gct6s\" (UID: \"e17f032e-e01e-4a6f-9f09-ccde7159a801\") " pod="openshift-ingress-canary/ingress-canary-gct6s" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973321 4849 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6459e607-3af9-4658-8425-7d20afaa71da-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-qxlsn\" (UID: \"6459e607-3af9-4658-8425-7d20afaa71da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973364 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcfg7\" (UniqueName: \"kubernetes.io/projected/8d71eb0e-8baf-4813-b29e-d8b0505f3ce4-kube-api-access-gcfg7\") pod \"dns-default-w7xvk\" (UID: \"8d71eb0e-8baf-4813-b29e-d8b0505f3ce4\") " pod="openshift-dns/dns-default-w7xvk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973394 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ffe44d8b-b221-4e19-a096-c49d48bc2391-node-bootstrap-token\") pod \"machine-config-server-ffr5d\" (UID: \"ffe44d8b-b221-4e19-a096-c49d48bc2391\") " pod="openshift-machine-config-operator/machine-config-server-ffr5d" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973428 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddxx9\" (UniqueName: \"kubernetes.io/projected/6459e607-3af9-4658-8425-7d20afaa71da-kube-api-access-ddxx9\") pod \"machine-config-controller-84d6567774-qxlsn\" (UID: \"6459e607-3af9-4658-8425-7d20afaa71da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973446 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-csi-data-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973474 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7kzw\" (UniqueName: \"kubernetes.io/projected/40fb2e03-cf2f-4504-9168-9e487f5799ea-kube-api-access-h7kzw\") pod \"collect-profiles-29412735-hgspx\" (UID: \"40fb2e03-cf2f-4504-9168-9e487f5799ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973521 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6459e607-3af9-4658-8425-7d20afaa71da-proxy-tls\") pod \"machine-config-controller-84d6567774-qxlsn\" (UID: \"6459e607-3af9-4658-8425-7d20afaa71da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973716 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-registration-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973770 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8d71eb0e-8baf-4813-b29e-d8b0505f3ce4-config-volume\") pod 
\"dns-default-w7xvk\" (UID: \"8d71eb0e-8baf-4813-b29e-d8b0505f3ce4\") " pod="openshift-dns/dns-default-w7xvk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973821 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-socket-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973927 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-socket-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.973971 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-mountpoint-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.974028 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-plugins-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.975339 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6b4406f5-12c8-47b9-9e38-792a0c6e53ed-signing-cabundle\") pod \"service-ca-9c57cc56f-zjlhp\" (UID: \"6b4406f5-12c8-47b9-9e38-792a0c6e53ed\") " pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" Dec 03 12:23:06 crc kubenswrapper[4849]: E1203 12:23:06.976072 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:07.476056576 +0000 UTC m=+133.937904359 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.977076 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.979403 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-csi-data-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.980219 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40fb2e03-cf2f-4504-9168-9e487f5799ea-config-volume\") pod \"collect-profiles-29412735-hgspx\" (UID: \"40fb2e03-cf2f-4504-9168-9e487f5799ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.980215 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6459e607-3af9-4658-8425-7d20afaa71da-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-qxlsn\" (UID: \"6459e607-3af9-4658-8425-7d20afaa71da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.980382 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/45259dc8-64c2-4684-9669-e8b743a14857-registration-dir\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.980875 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8d71eb0e-8baf-4813-b29e-d8b0505f3ce4-config-volume\") pod \"dns-default-w7xvk\" (UID: \"8d71eb0e-8baf-4813-b29e-d8b0505f3ce4\") " pod="openshift-dns/dns-default-w7xvk" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.982773 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.982976 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/19cb4ba0-d936-4448-b004-402ec12d9bdd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-m4cms\" (UID: \"19cb4ba0-d936-4448-b004-402ec12d9bdd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.983030 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fg7zk\" (UniqueName: \"kubernetes.io/projected/f6ce1f54-c500-4887-b9c7-cd89893a62ee-kube-api-access-fg7zk\") pod \"downloads-7954f5f757-jcr69\" (UID: \"f6ce1f54-c500-4887-b9c7-cd89893a62ee\") " pod="openshift-console/downloads-7954f5f757-jcr69" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.983145 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e17f032e-e01e-4a6f-9f09-ccde7159a801-cert\") pod \"ingress-canary-gct6s\" (UID: \"e17f032e-e01e-4a6f-9f09-ccde7159a801\") " pod="openshift-ingress-canary/ingress-canary-gct6s" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.984075 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6b4406f5-12c8-47b9-9e38-792a0c6e53ed-signing-key\") pod \"service-ca-9c57cc56f-zjlhp\" (UID: \"6b4406f5-12c8-47b9-9e38-792a0c6e53ed\") " pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.984364 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzb6w\" (UniqueName: \"kubernetes.io/projected/23e004d1-bd4a-4ff4-a4ca-f9cca1c19580-kube-api-access-pzb6w\") pod \"openshift-config-operator-7777fb866f-v9bf5\" (UID: \"23e004d1-bd4a-4ff4-a4ca-f9cca1c19580\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.984854 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/13bacd23-8657-41bd-ad96-26e1ec27bd42-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-657zm\" (UID: \"13bacd23-8657-41bd-ad96-26e1ec27bd42\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-657zm" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.991869 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/40fb2e03-cf2f-4504-9168-9e487f5799ea-secret-volume\") pod \"collect-profiles-29412735-hgspx\" (UID: \"40fb2e03-cf2f-4504-9168-9e487f5799ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.992209 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23e004d1-bd4a-4ff4-a4ca-f9cca1c19580-serving-cert\") pod \"openshift-config-operator-7777fb866f-v9bf5\" (UID: \"23e004d1-bd4a-4ff4-a4ca-f9cca1c19580\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.992372 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:06 crc kubenswrapper[4849]: I1203 12:23:06.987858 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8d71eb0e-8baf-4813-b29e-d8b0505f3ce4-metrics-tls\") pod \"dns-default-w7xvk\" (UID: \"8d71eb0e-8baf-4813-b29e-d8b0505f3ce4\") " pod="openshift-dns/dns-default-w7xvk" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.000580 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ffe44d8b-b221-4e19-a096-c49d48bc2391-node-bootstrap-token\") pod \"machine-config-server-ffr5d\" (UID: \"ffe44d8b-b221-4e19-a096-c49d48bc2391\") " pod="openshift-machine-config-operator/machine-config-server-ffr5d" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.001109 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2v2pv\" (UniqueName: \"kubernetes.io/projected/85ee3144-92c7-48ec-855f-a3d01fc6d89a-kube-api-access-2v2pv\") pod \"service-ca-operator-777779d784-xpg5z\" (UID: \"85ee3144-92c7-48ec-855f-a3d01fc6d89a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.007056 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.007459 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6459e607-3af9-4658-8425-7d20afaa71da-proxy-tls\") pod \"machine-config-controller-84d6567774-qxlsn\" (UID: \"6459e607-3af9-4658-8425-7d20afaa71da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.008510 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.009291 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ffe44d8b-b221-4e19-a096-c49d48bc2391-certs\") pod \"machine-config-server-ffr5d\" (UID: \"ffe44d8b-b221-4e19-a096-c49d48bc2391\") " pod="openshift-machine-config-operator/machine-config-server-ffr5d" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.011803 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72044bfe-dd36-43dd-823f-1dfc7b07d8ac-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lvg2x\" (UID: \"72044bfe-dd36-43dd-823f-1dfc7b07d8ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.016514 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf"] Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.022720 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9x4b\" (UniqueName: \"kubernetes.io/projected/13bacd23-8657-41bd-ad96-26e1ec27bd42-kube-api-access-k9x4b\") pod \"multus-admission-controller-857f4d67dd-657zm\" (UID: \"13bacd23-8657-41bd-ad96-26e1ec27bd42\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-657zm" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.026606 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bbl9\" (UniqueName: \"kubernetes.io/projected/ea7b6a9d-c590-4658-af57-2e0d68f31a4d-kube-api-access-9bbl9\") pod \"openshift-controller-manager-operator-756b6f6bc6-gt4qk\" (UID: \"ea7b6a9d-c590-4658-af57-2e0d68f31a4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.032387 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-bound-sa-token\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.070195 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brkj6\" (UniqueName: \"kubernetes.io/projected/99e679dc-a5f9-406b-b6c7-d5fcbddb6a31-kube-api-access-brkj6\") pod \"authentication-operator-69f744f599-x6dkl\" (UID: \"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.077516 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:07 crc kubenswrapper[4849]: E1203 12:23:07.078044 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-03 12:23:07.578030634 +0000 UTC m=+134.039878416 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.079311 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.085912 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-ls6fj\" (UID: \"5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.090147 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zxz5\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-kube-api-access-4zxz5\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.092947 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-trkjr"] Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.115346 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.123091 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9jtn\" (UniqueName: \"kubernetes.io/projected/d2b308a7-868d-42d7-8eab-5b525b108855-kube-api-access-k9jtn\") pod \"machine-approver-56656f9798-pnncr\" (UID: \"d2b308a7-868d-42d7-8eab-5b525b108855\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.152922 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-657zm" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.153002 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/72044bfe-dd36-43dd-823f-1dfc7b07d8ac-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lvg2x\" (UID: \"72044bfe-dd36-43dd-823f-1dfc7b07d8ac\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.178563 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:07 crc kubenswrapper[4849]: E1203 12:23:07.178916 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:07.67890442 +0000 UTC m=+134.140752203 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.181496 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldtj9\" (UniqueName: \"kubernetes.io/projected/8478b9f3-643c-490d-8f8b-663e19230dc2-kube-api-access-ldtj9\") pod \"oauth-openshift-558db77b4-f8svg\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.182346 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl888\" (UniqueName: \"kubernetes.io/projected/c4daca20-bfbb-4929-8a37-5f75500b1afd-kube-api-access-cl888\") pod \"package-server-manager-789f6589d5-6q7bv\" (UID: \"c4daca20-bfbb-4929-8a37-5f75500b1afd\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.194337 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.205460 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.215091 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-rv7bj"] Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.220447 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnjwk\" (UniqueName: \"kubernetes.io/projected/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-kube-api-access-jnjwk\") pod \"route-controller-manager-6576b87f9c-v6w56\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.228038 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdv9d\" (UniqueName: \"kubernetes.io/projected/9ceeff5f-df0f-482d-b82e-2be694db679e-kube-api-access-tdv9d\") pod \"machine-config-operator-74547568cd-nvzf6\" (UID: \"9ceeff5f-df0f-482d-b82e-2be694db679e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.240555 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" event={"ID":"d3de6981-1f82-494a-b652-59c9677791b5","Type":"ContainerStarted","Data":"b6d0c25d3b3231c86e9878bd46851e33e81246abe86530b1f14125a23fa63096"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.242188 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" event={"ID":"4b549740-1c07-490a-be84-a135fc13c554","Type":"ContainerStarted","Data":"6967311c55c7dc07f7f935c07d0d8d80d79a56ffa9e5af1f8c575fde1e86f8c3"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.242230 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" event={"ID":"4b549740-1c07-490a-be84-a135fc13c554","Type":"ContainerStarted","Data":"586dd73236aa249127ad1930089bb785f9f27b69c7b0131905ca2fef2bf1c060"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.245119 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zx8q\" (UniqueName: \"kubernetes.io/projected/ad1ad7d0-2483-4332-8d18-79fc262fe94b-kube-api-access-7zx8q\") pod \"packageserver-d55dfcdfc-cgdl8\" (UID: \"ad1ad7d0-2483-4332-8d18-79fc262fe94b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.255963 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" event={"ID":"555e97b5-6263-468f-ad51-4e32ca4ac5d4","Type":"ContainerStarted","Data":"9a806a02afef15be2932fffd1288065ea0959030c4993419d0a4578d7aaa8b45"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.256004 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" event={"ID":"555e97b5-6263-468f-ad51-4e32ca4ac5d4","Type":"ContainerStarted","Data":"b9bad241f6950ce9f414afe2257a7265047dfa0fa9c6ff6a559479f3f5ee0925"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.256708 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:07 crc 
kubenswrapper[4849]: I1203 12:23:07.257397 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8ddts" event={"ID":"73126b70-0c81-4c61-a395-8aa33f0128a8","Type":"ContainerStarted","Data":"58ea38c9bb206431d71456931e6c07675ad1d74458127c281fded9dacda0c11a"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.258118 4849 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-rn5cg container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body= Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.258160 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" podUID="555e97b5-6263-468f-ad51-4e32ca4ac5d4" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.263740 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" event={"ID":"0ef2f4fb-55d8-4502-be08-aa6296dc8dd7","Type":"ContainerStarted","Data":"a4b5e43486fea44f082004af3385ea11300929db057e052a1486d767b0377266"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.263782 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" event={"ID":"0ef2f4fb-55d8-4502-be08-aa6296dc8dd7","Type":"ContainerStarted","Data":"60bc8f9e332bedede51cb19625794407dfdc7f2c5f7a7dd1c15f03f29f0a8738"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.265115 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-zdq4x" event={"ID":"7eca320e-67e3-4f03-92ff-8a79363ca7ad","Type":"ContainerStarted","Data":"588c9e107a5bd45e7f15fb309642a7fab679797165359903355f0dff3450ade2"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.265596 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.268328 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" event={"ID":"cef45b47-894e-4bfe-82ff-352085dbf93e","Type":"ContainerStarted","Data":"f86185214324652004a1599e3078ef3c1dfca200eb16bedee5938dcc338faf6f"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.271669 4849 generic.go:334] "Generic (PLEG): container finished" podID="dedca0f9-b035-4d2d-bb74-85c016290a92" containerID="76dac8707af7c25e33c9ff3dbb9aaa81d690bb7d6825b0826c18212742807545" exitCode=0 Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.271750 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" event={"ID":"dedca0f9-b035-4d2d-bb74-85c016290a92","Type":"ContainerDied","Data":"76dac8707af7c25e33c9ff3dbb9aaa81d690bb7d6825b0826c18212742807545"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.271777 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" event={"ID":"dedca0f9-b035-4d2d-bb74-85c016290a92","Type":"ContainerStarted","Data":"ccecf479da862f131077b1735edd416e95db2bbc997d4dec5c95565ffedfb368"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.273212 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlhx2\" (UniqueName: \"kubernetes.io/projected/ffe44d8b-b221-4e19-a096-c49d48bc2391-kube-api-access-nlhx2\") pod \"machine-config-server-ffr5d\" (UID: \"ffe44d8b-b221-4e19-a096-c49d48bc2391\") " pod="openshift-machine-config-operator/machine-config-server-ffr5d" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.274197 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" event={"ID":"b27af4b9-6198-4f4e-b269-2faed33e9dad","Type":"ContainerStarted","Data":"004dbac018cd52f9c31b4d58fab25191698955b53717d4932e6ade48fabbd20b"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.274227 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" event={"ID":"b27af4b9-6198-4f4e-b269-2faed33e9dad","Type":"ContainerStarted","Data":"fa3495f39ba9d4ab5ff21badcde24d8b2116abefdd370ed6aeabe7723027a1f4"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.274237 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-jcr69" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.277550 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-zhk4k" event={"ID":"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e","Type":"ContainerStarted","Data":"de589ce7bcc65e3e817bf3058f0eaf15588cd690d21d53038a57c4b9a1b42b7e"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.280427 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:07 crc kubenswrapper[4849]: E1203 12:23:07.281452 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:07.781434624 +0000 UTC m=+134.243282406 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.282117 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rrs5\" (UniqueName: \"kubernetes.io/projected/6b4406f5-12c8-47b9-9e38-792a0c6e53ed-kube-api-access-2rrs5\") pod \"service-ca-9c57cc56f-zjlhp\" (UID: \"6b4406f5-12c8-47b9-9e38-792a0c6e53ed\") " pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.282202 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z6qrp" event={"ID":"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152","Type":"ContainerStarted","Data":"7ff5092b41a6fed760fd0b5973ea5034dd1ba38d0f8735b4d07977b29deb9e82"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.282223 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z6qrp" event={"ID":"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152","Type":"ContainerStarted","Data":"46116c7eb5793645b8e07325db5f0f70f594899c6df00023f51d28ae96d284be"} Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.290681 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.297833 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-ffr5d" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.302303 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hv85\" (UniqueName: \"kubernetes.io/projected/e17f032e-e01e-4a6f-9f09-ccde7159a801-kube-api-access-6hv85\") pod \"ingress-canary-gct6s\" (UID: \"e17f032e-e01e-4a6f-9f09-ccde7159a801\") " pod="openshift-ingress-canary/ingress-canary-gct6s" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.321213 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8bbp\" (UniqueName: \"kubernetes.io/projected/19cb4ba0-d936-4448-b004-402ec12d9bdd-kube-api-access-n8bbp\") pod \"control-plane-machine-set-operator-78cbb6b69f-m4cms\" (UID: \"19cb4ba0-d936-4448-b004-402ec12d9bdd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.354782 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44j4t\" (UniqueName: \"kubernetes.io/projected/45259dc8-64c2-4684-9669-e8b743a14857-kube-api-access-44j4t\") pod \"csi-hostpathplugin-cfwmz\" (UID: \"45259dc8-64c2-4684-9669-e8b743a14857\") " pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.360368 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.366383 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.372758 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcfg7\" (UniqueName: \"kubernetes.io/projected/8d71eb0e-8baf-4813-b29e-d8b0505f3ce4-kube-api-access-gcfg7\") pod \"dns-default-w7xvk\" (UID: \"8d71eb0e-8baf-4813-b29e-d8b0505f3ce4\") " pod="openshift-dns/dns-default-w7xvk" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.385059 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:07 crc kubenswrapper[4849]: E1203 12:23:07.388938 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:07.888925608 +0000 UTC m=+134.350773391 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.389846 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddxx9\" (UniqueName: \"kubernetes.io/projected/6459e607-3af9-4658-8425-7d20afaa71da-kube-api-access-ddxx9\") pod \"machine-config-controller-84d6567774-qxlsn\" (UID: \"6459e607-3af9-4658-8425-7d20afaa71da\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.419386 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7kzw\" (UniqueName: \"kubernetes.io/projected/40fb2e03-cf2f-4504-9168-9e487f5799ea-kube-api-access-h7kzw\") pod \"collect-profiles-29412735-hgspx\" (UID: \"40fb2e03-cf2f-4504-9168-9e487f5799ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.425340 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.448930 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.480342 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.486325 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.486577 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:07 crc kubenswrapper[4849]: E1203 12:23:07.486968 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:07.986939005 +0000 UTC m=+134.448786788 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.518477 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.524047 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.533317 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.563720 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.568393 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-gct6s" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.573787 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-w7xvk" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.591496 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:07 crc kubenswrapper[4849]: E1203 12:23:07.591842 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:08.091823697 +0000 UTC m=+134.553671480 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.594240 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.693973 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:07 crc kubenswrapper[4849]: E1203 12:23:07.694104 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:08.194086186 +0000 UTC m=+134.655933969 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.694475 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:07 crc kubenswrapper[4849]: E1203 12:23:07.694875 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:08.194865413 +0000 UTC m=+134.656713196 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.795549 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:07 crc kubenswrapper[4849]: E1203 12:23:07.796101 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:08.296083007 +0000 UTC m=+134.757930789 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.828223 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl"] Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.849157 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z2mdp"] Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.851174 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg"] Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.854340 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb"] Dec 03 12:23:07 crc kubenswrapper[4849]: I1203 12:23:07.896909 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:07 crc kubenswrapper[4849]: E1203 12:23:07.897446 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:08.397433772 +0000 UTC m=+134.859281555 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.005848 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:08 crc kubenswrapper[4849]: E1203 12:23:08.006186 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:08.506173123 +0000 UTC m=+134.968020906 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.117778 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:08 crc kubenswrapper[4849]: E1203 12:23:08.118341 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:08.618330616 +0000 UTC m=+135.080178399 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.218518 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:08 crc kubenswrapper[4849]: E1203 12:23:08.218946 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:08.718928344 +0000 UTC m=+135.180776127 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.229445 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-z6qrp" podStartSLOduration=116.229431039 podStartE2EDuration="1m56.229431039s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:08.228045974 +0000 UTC m=+134.689893757" watchObservedRunningTime="2025-12-03 12:23:08.229431039 +0000 UTC m=+134.691278822" Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.302590 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" event={"ID":"b27af4b9-6198-4f4e-b269-2faed33e9dad","Type":"ContainerStarted","Data":"8191e0f5001f6e03d60adc58b823f0254b26a6b77cf1abfa653fe65a1008434e"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.305124 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" event={"ID":"f3fd73ed-3b8f-4cc1-893b-49e816d12386","Type":"ContainerStarted","Data":"03b5a3d6aa72d2f54fcaf8c8e0d5b60f8473a663f658259ae133d7f6fb326f73"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.307305 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rv7bj" event={"ID":"c7251dfa-73ce-4de2-953b-83e05ae7e3f8","Type":"ContainerStarted","Data":"14e1821ccb13df9d211ae3ee651bf9714909dd0e58e177019862dabf375b7bb6"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.307335 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rv7bj" event={"ID":"c7251dfa-73ce-4de2-953b-83e05ae7e3f8","Type":"ContainerStarted","Data":"da85bd5609e2839882709664c97defd3d4a5fbe2e7f7d700d43bcf9a4434c929"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.309960 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8ddts" event={"ID":"73126b70-0c81-4c61-a395-8aa33f0128a8","Type":"ContainerStarted","Data":"d368946042d23a6d202c0af25371c6c5d80e1263273e6f506c8874c48fc7e3cb"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.314684 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf" event={"ID":"68d328a7-6916-41a1-b3df-f21a1635c000","Type":"ContainerStarted","Data":"46964c51cf43b351a5a87b881fe6874b8f2fac2e0f4b313ae6152c7b408686c2"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.314725 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf" event={"ID":"68d328a7-6916-41a1-b3df-f21a1635c000","Type":"ContainerStarted","Data":"b23257139d358ce7a0f731d13b175c04606b44a3abc982686bf26bbb08e02fbc"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.321843 4849 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:08 crc kubenswrapper[4849]: E1203 12:23:08.322156 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:08.822144848 +0000 UTC m=+135.283992631 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.329255 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" event={"ID":"975a1cda-589e-4583-a601-b2a1eba69a16","Type":"ContainerStarted","Data":"263ed43668460be7b335116729a7d7abd74b39547dcac7a9c43a3594e08c87a3"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.341405 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-ffr5d" event={"ID":"ffe44d8b-b221-4e19-a096-c49d48bc2391","Type":"ContainerStarted","Data":"c4584f5b6b620335bf223de75e825235e602dfb53db8204932c343b54bb641b4"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.341442 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-ffr5d" event={"ID":"ffe44d8b-b221-4e19-a096-c49d48bc2391","Type":"ContainerStarted","Data":"4aaec47b8f96a345e16e80d94f88d95a3088b0c70c3bda1eb4fe3ae8d05f1e75"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.356561 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" event={"ID":"dedca0f9-b035-4d2d-bb74-85c016290a92","Type":"ContainerStarted","Data":"a23238b2a43f58a45686b1d0e4320c6c9a7bcbf91f8b32f1a36d43fae0013f10"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.361556 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" event={"ID":"d3de6981-1f82-494a-b652-59c9677791b5","Type":"ContainerStarted","Data":"4883afe1ca98fa330328f1befb730ecf4f1dc5d82439d5938d1bc5572a915b07"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.364044 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-zdq4x" event={"ID":"7eca320e-67e3-4f03-92ff-8a79363ca7ad","Type":"ContainerStarted","Data":"558007a83a6c9d380db697124e742dbf2ebd2dc68f0def7a525fc1c61cd7037a"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.365961 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" event={"ID":"d2b308a7-868d-42d7-8eab-5b525b108855","Type":"ContainerStarted","Data":"26fa0f21ee168a00015e2fa10bacaf48a4d6b8f9a366f9fed5a1ea583f8b8fe2"} Dec 03 12:23:08 crc 
kubenswrapper[4849]: I1203 12:23:08.367580 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" event={"ID":"cef45b47-894e-4bfe-82ff-352085dbf93e","Type":"ContainerStarted","Data":"28787345a350249041421915524dc9df13b2e99cf9c4f731001eea63ff48b78f"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.368612 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" event={"ID":"fddbd3be-962c-4043-9b61-8954a0a4c796","Type":"ContainerStarted","Data":"26baf7e38f46af1cfea6bc51adb0067672b24506d7bff797e53b4d755d65959b"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.372474 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-zhk4k" event={"ID":"9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e","Type":"ContainerStarted","Data":"29995c4aa8479b4fc9db7063bde5079ef8a9e8d2b86a1c321217b387c5b51028"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.372528 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.373861 4849 patch_prober.go:28] interesting pod/console-operator-58897d9998-zhk4k container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/readyz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.373885 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-zhk4k" podUID="9c55f0db-5c1e-4e4c-bfc9-5dabdb076c6e" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.6:8443/readyz\": dial tcp 10.217.0.6:8443: connect: connection refused" Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.373948 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" event={"ID":"52c7a701-2169-4dbc-ba2f-62e40b8b4450","Type":"ContainerStarted","Data":"ac02fcb5ff8b0f849ddc64f60c068ff3d7c17c7987644f95751d7190ef787073"} Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.378769 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.423425 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:08 crc kubenswrapper[4849]: E1203 12:23:08.424699 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:08.924684069 +0000 UTC m=+135.386531852 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.452468 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-5jm84"] Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.469068 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj"] Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.529380 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:08 crc kubenswrapper[4849]: E1203 12:23:08.531863 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:09.031851604 +0000 UTC m=+135.493699388 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.630825 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:08 crc kubenswrapper[4849]: E1203 12:23:08.631393 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:09.131380442 +0000 UTC m=+135.593228225 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.653450 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5"] Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.667846 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" podStartSLOduration=116.667836304 podStartE2EDuration="1m56.667836304s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:08.6669729 +0000 UTC m=+135.128820682" watchObservedRunningTime="2025-12-03 12:23:08.667836304 +0000 UTC m=+135.129684087" Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.733659 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:08 crc kubenswrapper[4849]: E1203 12:23:08.734216 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:09.234204318 +0000 UTC m=+135.696052101 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.753250 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-hx6f9" podStartSLOduration=117.753234051 podStartE2EDuration="1m57.753234051s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:08.710155954 +0000 UTC m=+135.172003737" watchObservedRunningTime="2025-12-03 12:23:08.753234051 +0000 UTC m=+135.215081834" Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.839046 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:08 crc kubenswrapper[4849]: E1203 12:23:08.839419 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:09.339405315 +0000 UTC m=+135.801253097 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.862758 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-67sg6" podStartSLOduration=116.862735197 podStartE2EDuration="1m56.862735197s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:08.86130707 +0000 UTC m=+135.323154853" watchObservedRunningTime="2025-12-03 12:23:08.862735197 +0000 UTC m=+135.324582979" Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.876412 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:08 crc kubenswrapper[4849]: W1203 12:23:08.905228 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23e004d1_bd4a_4ff4_a4ca_f9cca1c19580.slice/crio-315b293c3017037e5376bc02a4aa5c62d2d3ae9260672a1fb681e6e3211de756 WatchSource:0}: Error finding container 315b293c3017037e5376bc02a4aa5c62d2d3ae9260672a1fb681e6e3211de756: Status 404 returned error can't find the container with id 315b293c3017037e5376bc02a4aa5c62d2d3ae9260672a1fb681e6e3211de756 Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.923505 4849 patch_prober.go:28] interesting pod/router-default-5444994796-zdq4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:23:08 crc kubenswrapper[4849]: [-]has-synced failed: reason withheld Dec 03 12:23:08 crc kubenswrapper[4849]: [+]process-running ok Dec 03 12:23:08 crc kubenswrapper[4849]: healthz check failed Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.923544 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdq4x" podUID="7eca320e-67e3-4f03-92ff-8a79363ca7ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:23:08 crc kubenswrapper[4849]: I1203 12:23:08.947894 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:08 crc kubenswrapper[4849]: E1203 12:23:08.948462 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:09.448440283 +0000 UTC m=+135.910288065 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.050065 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:09 crc kubenswrapper[4849]: E1203 12:23:09.050428 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:09.550414821 +0000 UTC m=+136.012262604 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.098387 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rn5cg" podStartSLOduration=117.098371294 podStartE2EDuration="1m57.098371294s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.067825961 +0000 UTC m=+135.529673743" watchObservedRunningTime="2025-12-03 12:23:09.098371294 +0000 UTC m=+135.560219067" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.098689 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-x6dkl"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.119343 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.154350 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:09 crc kubenswrapper[4849]: E1203 12:23:09.154810 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:09.654799963 +0000 UTC m=+136.116647746 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:09 crc kubenswrapper[4849]: W1203 12:23:09.174740 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72044bfe_dd36_43dd_823f_1dfc7b07d8ac.slice/crio-d29a21139968c46eb8b2d488a6c544bc31ebf61112af655d011ac744ed339796 WatchSource:0}: Error finding container d29a21139968c46eb8b2d488a6c544bc31ebf61112af655d011ac744ed339796: Status 404 returned error can't find the container with id d29a21139968c46eb8b2d488a6c544bc31ebf61112af655d011ac744ed339796 Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.203296 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-vstm9" podStartSLOduration=117.203280703 podStartE2EDuration="1m57.203280703s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.189495183 +0000 UTC m=+135.651342966" watchObservedRunningTime="2025-12-03 12:23:09.203280703 +0000 UTC m=+135.665128485" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.249026 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" podStartSLOduration=117.249012001 podStartE2EDuration="1m57.249012001s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.248577824 +0000 UTC m=+135.710425606" watchObservedRunningTime="2025-12-03 12:23:09.249012001 +0000 UTC m=+135.710859783" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.255293 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:09 crc kubenswrapper[4849]: E1203 12:23:09.255738 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:09.755724015 +0000 UTC m=+136.217571798 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.271492 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-ffr5d" podStartSLOduration=5.271476094 podStartE2EDuration="5.271476094s" podCreationTimestamp="2025-12-03 12:23:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.270930177 +0000 UTC m=+135.732777960" watchObservedRunningTime="2025-12-03 12:23:09.271476094 +0000 UTC m=+135.733323877" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.308035 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-zhk4k" podStartSLOduration=118.308018198 podStartE2EDuration="1m58.308018198s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.306008288 +0000 UTC m=+135.767856070" watchObservedRunningTime="2025-12-03 12:23:09.308018198 +0000 UTC m=+135.769865981" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.335282 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-657zm"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.337440 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-zjlhp"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.354362 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-zlm6p" podStartSLOduration=117.354349354 podStartE2EDuration="1m57.354349354s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.351987 +0000 UTC m=+135.813834783" watchObservedRunningTime="2025-12-03 12:23:09.354349354 +0000 UTC m=+135.816197137" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.365139 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.365236 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:09 crc kubenswrapper[4849]: E1203 12:23:09.365493 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-03 12:23:09.865482494 +0000 UTC m=+136.327330277 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.374342 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.393519 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-zdq4x" podStartSLOduration=117.393505564 podStartE2EDuration="1m57.393505564s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.388076252 +0000 UTC m=+135.849924025" watchObservedRunningTime="2025-12-03 12:23:09.393505564 +0000 UTC m=+135.855353348" Dec 03 12:23:09 crc kubenswrapper[4849]: W1203 12:23:09.397112 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85ee3144_92c7_48ec_855f_a3d01fc6d89a.slice/crio-a431a1b663e9ecc2e1923c0615c5a9abb92865bfcf3abd2a7593d5215a59bc49 WatchSource:0}: Error finding container a431a1b663e9ecc2e1923c0615c5a9abb92865bfcf3abd2a7593d5215a59bc49: Status 404 returned error can't find the container with id a431a1b663e9ecc2e1923c0615c5a9abb92865bfcf3abd2a7593d5215a59bc49 Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.397336 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" event={"ID":"fddbd3be-962c-4043-9b61-8954a0a4c796","Type":"ContainerStarted","Data":"bbba31ae114d697648b9597c47d5a69296e1e6643cdb77717e0e39a651ec91ae"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.397907 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.400963 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf" event={"ID":"68d328a7-6916-41a1-b3df-f21a1635c000","Type":"ContainerStarted","Data":"2997dfc8c11eed60557cfe097503284a231f3d2441dde308df5bd73dae1b7da4"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.403312 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-f8svg"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.412444 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" event={"ID":"6b4406f5-12c8-47b9-9e38-792a0c6e53ed","Type":"ContainerStarted","Data":"c5964dc911968712a72384e875c2b2b5b7dd390f82fbda2ad6fe4a398edb39ff"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.413988 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 
12:23:09.422183 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" event={"ID":"cef45b47-894e-4bfe-82ff-352085dbf93e","Type":"ContainerStarted","Data":"0952df816b7f01e1f0f4ec8c5a83882c7ed51488e75e027185138f25206df867"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.434808 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.443487 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rv7bj" event={"ID":"c7251dfa-73ce-4de2-953b-83e05ae7e3f8","Type":"ContainerStarted","Data":"eafc3f63d1c557b3cbaa2da8771c794f0d975cf3cc6cfda103e2e92de90dd244"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.446041 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8ddts" event={"ID":"73126b70-0c81-4c61-a395-8aa33f0128a8","Type":"ContainerStarted","Data":"4851e694704a4805c0ffaf4e95751b368628d99d66b0268144248f607370caea"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.458689 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" event={"ID":"d2b308a7-868d-42d7-8eab-5b525b108855","Type":"ContainerStarted","Data":"0f20ba999b95f896a647cdf579de7b7241f91bd1ef4b9645c606ffdd289aa868"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.458736 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" event={"ID":"d2b308a7-868d-42d7-8eab-5b525b108855","Type":"ContainerStarted","Data":"09ab4b436e264cea29db59cdca4253c4da992516bf3e592a48e20f595517d25d"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.461021 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-f9nlf" podStartSLOduration=118.461009545 podStartE2EDuration="1m58.461009545s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.460610635 +0000 UTC m=+135.922458418" watchObservedRunningTime="2025-12-03 12:23:09.461009545 +0000 UTC m=+135.922857328" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.466609 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:09 crc kubenswrapper[4849]: E1203 12:23:09.467845 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:09.967829794 +0000 UTC m=+136.429677577 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.482065 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-jcr69"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.482104 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.490826 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" event={"ID":"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31","Type":"ContainerStarted","Data":"e5a7e72494ef66b7a551bdf4dfd4e4721828b2dcad5776eaf38316e59cc79429"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.490863 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" event={"ID":"99e679dc-a5f9-406b-b6c7-d5fcbddb6a31","Type":"ContainerStarted","Data":"a73882b019123c6ca4c4d4decf822c5e487ff5f5f95b11b835eeec1a894bdf98"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.501054 4849 generic.go:334] "Generic (PLEG): container finished" podID="fcb38731-cca7-4ab2-a751-7ccd17fd2a27" containerID="682617e7612bc33ead71444a8e152103f44fd565d1ea1d98c93a16cbb8fa649c" exitCode=0 Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.501120 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-5jm84" event={"ID":"fcb38731-cca7-4ab2-a751-7ccd17fd2a27","Type":"ContainerDied","Data":"682617e7612bc33ead71444a8e152103f44fd565d1ea1d98c93a16cbb8fa649c"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.501161 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-5jm84" event={"ID":"fcb38731-cca7-4ab2-a751-7ccd17fd2a27","Type":"ContainerStarted","Data":"3aa3f9cf344240b734e3e426698d1cf268440520613c0ef17953159081544610"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.520577 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" event={"ID":"975a1cda-589e-4583-a601-b2a1eba69a16","Type":"ContainerStarted","Data":"94ed221023abc408df99af0cf16fd2de740a029e09584426fb6ea443e35c0fed"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.520809 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.529419 4849 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-z2mdp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.529618 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" podUID="975a1cda-589e-4583-a601-b2a1eba69a16" 
containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.529803 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.530404 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zpgwb" podStartSLOduration=117.530393042 podStartE2EDuration="1m57.530393042s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.521044007 +0000 UTC m=+135.982891790" watchObservedRunningTime="2025-12-03 12:23:09.530393042 +0000 UTC m=+135.992240825" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.535268 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.535296 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" event={"ID":"52c7a701-2169-4dbc-ba2f-62e40b8b4450","Type":"ContainerStarted","Data":"0fce68aabba587c8524763db08725afe7e0bff5e9fb232cc718bb098287fcb5b"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.535684 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.541495 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.541529 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-w7xvk"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.545454 4849 generic.go:334] "Generic (PLEG): container finished" podID="23e004d1-bd4a-4ff4-a4ca-f9cca1c19580" containerID="f80c474759aa3218102ada872260e9f48dbe5d7240db35baaecba3bcd1001de4" exitCode=0 Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.547139 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" event={"ID":"23e004d1-bd4a-4ff4-a4ca-f9cca1c19580","Type":"ContainerDied","Data":"f80c474759aa3218102ada872260e9f48dbe5d7240db35baaecba3bcd1001de4"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.547167 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" event={"ID":"23e004d1-bd4a-4ff4-a4ca-f9cca1c19580","Type":"ContainerStarted","Data":"315b293c3017037e5376bc02a4aa5c62d2d3ae9260672a1fb681e6e3211de756"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.547609 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-gct6s"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.550897 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-cfwmz"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.550953 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv"] Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.558602 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-trkjr" podStartSLOduration=117.55859058 podStartE2EDuration="1m57.55859058s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.558026187 +0000 UTC m=+136.019873971" watchObservedRunningTime="2025-12-03 12:23:09.55859058 +0000 UTC m=+136.020438362" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.568635 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:09 crc kubenswrapper[4849]: E1203 12:23:09.573435 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.070092665 +0000 UTC m=+136.531940448 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.575596 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" event={"ID":"f3fd73ed-3b8f-4cc1-893b-49e816d12386","Type":"ContainerStarted","Data":"03695bf807dbad9b9421943f8726d8d768dd82dc9f022257714f32e779d9b406"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.593060 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" event={"ID":"72044bfe-dd36-43dd-823f-1dfc7b07d8ac","Type":"ContainerStarted","Data":"d29a21139968c46eb8b2d488a6c544bc31ebf61112af655d011ac744ed339796"} Dec 03 12:23:09 crc kubenswrapper[4849]: W1203 12:23:09.602049 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad1ad7d0_2483_4332_8d18_79fc262fe94b.slice/crio-9fc8e93ea855f64bd6754e2a925e2258d6fb69df2ecb1de61379263dce40807a WatchSource:0}: Error finding container 9fc8e93ea855f64bd6754e2a925e2258d6fb69df2ecb1de61379263dce40807a: Status 404 returned error can't find the container with id 9fc8e93ea855f64bd6754e2a925e2258d6fb69df2ecb1de61379263dce40807a Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.606258 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" 
event={"ID":"5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5","Type":"ContainerStarted","Data":"086e141d7aa96d008b1b69d2c60fe8cfe5587b7799b52c9de27f5988758c8874"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.606295 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" event={"ID":"5cc4422a-f1a4-4e3f-9cb9-92d58660f7c5","Type":"ContainerStarted","Data":"dd9aee8f6719fd48dd43e56006e09c3bff1eba56d9d772835fdef7e28d6138ee"} Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.625947 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-zhk4k" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.626671 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pnncr" podStartSLOduration=118.626657419 podStartE2EDuration="1m58.626657419s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.626225357 +0000 UTC m=+136.088073139" watchObservedRunningTime="2025-12-03 12:23:09.626657419 +0000 UTC m=+136.088505202" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.656402 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rv7bj" podStartSLOduration=117.656386818 podStartE2EDuration="1m57.656386818s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.65599437 +0000 UTC m=+136.117842153" watchObservedRunningTime="2025-12-03 12:23:09.656386818 +0000 UTC m=+136.118234602" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.669737 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:09 crc kubenswrapper[4849]: E1203 12:23:09.669866 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.169844331 +0000 UTC m=+136.631692114 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.670064 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:09 crc kubenswrapper[4849]: E1203 12:23:09.671213 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.171199751 +0000 UTC m=+136.633047534 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.703030 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" podStartSLOduration=117.70301303 podStartE2EDuration="1m57.70301303s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.69888255 +0000 UTC m=+136.160730333" watchObservedRunningTime="2025-12-03 12:23:09.70301303 +0000 UTC m=+136.164860812" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.780018 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:09 crc kubenswrapper[4849]: E1203 12:23:09.780494 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.280478307 +0000 UTC m=+136.742326090 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.818532 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-x6dkl" podStartSLOduration=118.81849248 podStartE2EDuration="1m58.81849248s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.784532082 +0000 UTC m=+136.246379864" watchObservedRunningTime="2025-12-03 12:23:09.81849248 +0000 UTC m=+136.280340262" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.819601 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-8ddts" podStartSLOduration=117.81959002400001 podStartE2EDuration="1m57.819590024s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.817354799 +0000 UTC m=+136.279202583" watchObservedRunningTime="2025-12-03 12:23:09.819590024 +0000 UTC m=+136.281437806" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.855506 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-97cvl" podStartSLOduration=117.855491463 podStartE2EDuration="1m57.855491463s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.854191847 +0000 UTC m=+136.316039631" watchObservedRunningTime="2025-12-03 12:23:09.855491463 +0000 UTC m=+136.317339246" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.881321 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:09 crc kubenswrapper[4849]: E1203 12:23:09.881799 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.381787825 +0000 UTC m=+136.843635608 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.894794 4849 patch_prober.go:28] interesting pod/router-default-5444994796-zdq4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:23:09 crc kubenswrapper[4849]: [-]has-synced failed: reason withheld Dec 03 12:23:09 crc kubenswrapper[4849]: [+]process-running ok Dec 03 12:23:09 crc kubenswrapper[4849]: healthz check failed Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.894836 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdq4x" podUID="7eca320e-67e3-4f03-92ff-8a79363ca7ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.943362 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m45jg" podStartSLOduration=117.943348226 podStartE2EDuration="1m57.943348226s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:09.942833478 +0000 UTC m=+136.404681261" watchObservedRunningTime="2025-12-03 12:23:09.943348226 +0000 UTC m=+136.405196009" Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.983610 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:09 crc kubenswrapper[4849]: E1203 12:23:09.983796 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.483778983 +0000 UTC m=+136.945626766 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:09 crc kubenswrapper[4849]: I1203 12:23:09.984205 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:09 crc kubenswrapper[4849]: E1203 12:23:09.984716 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.484706388 +0000 UTC m=+136.946554171 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.085583 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:10 crc kubenswrapper[4849]: E1203 12:23:10.085928 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.585888455 +0000 UTC m=+137.047736239 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.086301 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:10 crc kubenswrapper[4849]: E1203 12:23:10.086730 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.586715031 +0000 UTC m=+137.048562813 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.189442 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:10 crc kubenswrapper[4849]: E1203 12:23:10.189786 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.689774388 +0000 UTC m=+137.151622171 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.290364 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:10 crc kubenswrapper[4849]: E1203 12:23:10.290676 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.790661722 +0000 UTC m=+137.252509504 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.392059 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:10 crc kubenswrapper[4849]: E1203 12:23:10.392206 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.892186634 +0000 UTC m=+137.354034418 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.392435 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:10 crc kubenswrapper[4849]: E1203 12:23:10.392776 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.892765553 +0000 UTC m=+137.354613337 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.493258 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:10 crc kubenswrapper[4849]: E1203 12:23:10.493898 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:10.993874874 +0000 UTC m=+137.455722657 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.594482 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:10 crc kubenswrapper[4849]: E1203 12:23:10.594770 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:11.094759492 +0000 UTC m=+137.556607275 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.611280 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-657zm" event={"ID":"13bacd23-8657-41bd-ad96-26e1ec27bd42","Type":"ContainerStarted","Data":"6789aee4cff4003ebcd00613d5c57e67e10c5c4289310bb2146eaba55aa9d75f"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.611313 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-657zm" event={"ID":"13bacd23-8657-41bd-ad96-26e1ec27bd42","Type":"ContainerStarted","Data":"f0af508cc8931ce7c75843c28e1673b1410dcd1c5f279f8a36737bc55b2b5c6b"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.611323 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-657zm" event={"ID":"13bacd23-8657-41bd-ad96-26e1ec27bd42","Type":"ContainerStarted","Data":"68e9cd848818d192417989599d2dc1af35102da445076a999bad0a9a678778c8"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.613166 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" event={"ID":"6b4406f5-12c8-47b9-9e38-792a0c6e53ed","Type":"ContainerStarted","Data":"7f0eb24598d997408a25a896253cc8fb7b9a78155b0d1bc46f0b2e7100e9274a"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.614810 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms" event={"ID":"19cb4ba0-d936-4448-b004-402ec12d9bdd","Type":"ContainerStarted","Data":"781ed9193aa484dcf9863edeae3d67205aa27b6624985bc06e0a646e44509004"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.614848 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms" event={"ID":"19cb4ba0-d936-4448-b004-402ec12d9bdd","Type":"ContainerStarted","Data":"12990dbdd00a59f176f505f5834e4a63be99fb2b10a89e67dd4cb0175f863267"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.616007 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" event={"ID":"ea7b6a9d-c590-4658-af57-2e0d68f31a4d","Type":"ContainerStarted","Data":"682953b7c9eabac6edeedeecacd1b96d0584e186a88b573572488ec447372429"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.616032 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" event={"ID":"ea7b6a9d-c590-4658-af57-2e0d68f31a4d","Type":"ContainerStarted","Data":"ea139f207c273611e43d56d52dd8d5ac16e9a647becba966f4be505e140d8629"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.617251 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" event={"ID":"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413","Type":"ContainerStarted","Data":"6fdd714fdf50b3af6c2db211cd10fd1f1ab43bca7ba08b847ff368f1691fe0c0"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.617279 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" event={"ID":"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413","Type":"ContainerStarted","Data":"aae0445dfe23ae89429246fbaf897d0ee72ac837ffa2d66fd98c0348e198e38b"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.617799 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.618839 4849 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-v6w56 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body= Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.618867 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" podUID="cfa5e2db-9a31-40f0-90a5-a4f19c4a1413" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.619992 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" event={"ID":"23e004d1-bd4a-4ff4-a4ca-f9cca1c19580","Type":"ContainerStarted","Data":"9c4902d28406bb2fe88de3fd5a2988b9ebf7d0f7ad7596dfca91cee685f513bb"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.620330 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.622511 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" 
event={"ID":"6459e607-3af9-4658-8425-7d20afaa71da","Type":"ContainerStarted","Data":"8c8126e65d36fad388977f5adcd24d4280742ec36d0f165e49f25d55d3c85237"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.622560 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" event={"ID":"6459e607-3af9-4658-8425-7d20afaa71da","Type":"ContainerStarted","Data":"6876b5b23a64b22917f1929f30cfa18b12161992a4d7fad43e171662e731f25b"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.622572 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" event={"ID":"6459e607-3af9-4658-8425-7d20afaa71da","Type":"ContainerStarted","Data":"1dd3fb0f016b530d1aab0f36fea7c0091dcb55ea487ed54b631eb77fe01bdbfa"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.623932 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" event={"ID":"9ceeff5f-df0f-482d-b82e-2be694db679e","Type":"ContainerStarted","Data":"9a3cf0deeb8f5ff3467a9bae4482977c5b717a8b7c22b6fe02838ceb829d8226"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.623957 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" event={"ID":"9ceeff5f-df0f-482d-b82e-2be694db679e","Type":"ContainerStarted","Data":"967ff2e854a5a8b4f63830d3ef724688c09f881c37d904ef9860b8bfe6104c02"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.623968 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" event={"ID":"9ceeff5f-df0f-482d-b82e-2be694db679e","Type":"ContainerStarted","Data":"965ebf996db8ccec6231486d60f31d1e94ae3ebaafbd98a96429415fefa9ecfe"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.625120 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" event={"ID":"72044bfe-dd36-43dd-823f-1dfc7b07d8ac","Type":"ContainerStarted","Data":"860c8559fdba387eb835decbd53d514f8229919ca1f69b09e12bec01ad8f7d21"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.626309 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" event={"ID":"40fb2e03-cf2f-4504-9168-9e487f5799ea","Type":"ContainerStarted","Data":"40cf6ef1dfe441e04874984081bf1dc19ddaceef0f66d6433ebeb6f4a843a4e3"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.626335 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" event={"ID":"40fb2e03-cf2f-4504-9168-9e487f5799ea","Type":"ContainerStarted","Data":"aba68409fc0ab9d507eeac42d860bb0a7c053b194c3a997c7378f4f608de2f41"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.627690 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-gct6s" event={"ID":"e17f032e-e01e-4a6f-9f09-ccde7159a801","Type":"ContainerStarted","Data":"75b940ff7a764b1534d3c3cda31e8f193db8ecdc4f791ce6a98d1327092b1e45"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.627715 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-gct6s" 
event={"ID":"e17f032e-e01e-4a6f-9f09-ccde7159a801","Type":"ContainerStarted","Data":"c2245da3a5bb0b9bb4086b455f670768514ccb8fdc9bb3c688b0b1a79d8c8aa2"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.628965 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-jcr69" event={"ID":"f6ce1f54-c500-4887-b9c7-cd89893a62ee","Type":"ContainerStarted","Data":"2d13dc6315aba119f68de95424a5fffd987c1f6426fc27b1d016240f4cf53146"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.628989 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-jcr69" event={"ID":"f6ce1f54-c500-4887-b9c7-cd89893a62ee","Type":"ContainerStarted","Data":"da38a46372aec01ee6c2f172cf3bb0e2a77863239ca3326630a9ba282f8e531d"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.629440 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-jcr69" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.630513 4849 patch_prober.go:28] interesting pod/downloads-7954f5f757-jcr69 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.630541 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-jcr69" podUID="f6ce1f54-c500-4887-b9c7-cd89893a62ee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.630847 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" event={"ID":"8478b9f3-643c-490d-8f8b-663e19230dc2","Type":"ContainerStarted","Data":"30ca7adfa0d56bcd9586142249f4e058b81c3d1589c8e27b091b09ddaf9eddd7"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.630877 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" event={"ID":"8478b9f3-643c-490d-8f8b-663e19230dc2","Type":"ContainerStarted","Data":"8d6aa84b3ded8ae691d13895e3fd6d2653465d6db4c8ce4804118c2c2392efd4"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.631152 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.632085 4849 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-f8svg container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.12:6443/healthz\": dial tcp 10.217.0.12:6443: connect: connection refused" start-of-body= Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.632117 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" podUID="8478b9f3-643c-490d-8f8b-663e19230dc2" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.12:6443/healthz\": dial tcp 10.217.0.12:6443: connect: connection refused" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.632527 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-w7xvk" 
event={"ID":"8d71eb0e-8baf-4813-b29e-d8b0505f3ce4","Type":"ContainerStarted","Data":"c757801d4f2a3a8f2242ae9bb8c0a4ed6814ac1f34daa8997eb2510c79f0a50f"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.632559 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-w7xvk" event={"ID":"8d71eb0e-8baf-4813-b29e-d8b0505f3ce4","Type":"ContainerStarted","Data":"b43ec2a8a98596d1c099cdf808f4979e90b3387ed7bc96103f41c443736ab239"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.632571 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-w7xvk" event={"ID":"8d71eb0e-8baf-4813-b29e-d8b0505f3ce4","Type":"ContainerStarted","Data":"3dc1f4871901a2a190d397cb68f200b9411e7d65a5d2e994ab13a051a3a6ea55"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.633009 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-w7xvk" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.634257 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" event={"ID":"85ee3144-92c7-48ec-855f-a3d01fc6d89a","Type":"ContainerStarted","Data":"9778e0f8bc192fc3353b909b6c3dca344f747348f6cb1deae92a26e787e8371b"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.634282 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" event={"ID":"85ee3144-92c7-48ec-855f-a3d01fc6d89a","Type":"ContainerStarted","Data":"a431a1b663e9ecc2e1923c0615c5a9abb92865bfcf3abd2a7593d5215a59bc49"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.634375 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-657zm" podStartSLOduration=118.634367813 podStartE2EDuration="1m58.634367813s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.633434056 +0000 UTC m=+137.095281829" watchObservedRunningTime="2025-12-03 12:23:10.634367813 +0000 UTC m=+137.096215595" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.634524 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-ls6fj" podStartSLOduration=118.63452058 podStartE2EDuration="1m58.63452058s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.017902117 +0000 UTC m=+136.479749900" watchObservedRunningTime="2025-12-03 12:23:10.63452058 +0000 UTC m=+137.096368363" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.635334 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" event={"ID":"45259dc8-64c2-4684-9669-e8b743a14857","Type":"ContainerStarted","Data":"4b4ea40b1488e7cdb8f2d06d72897591d06f3fa1ffa5375791dbea6b0fcd5f48"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.637321 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-5jm84" event={"ID":"fcb38731-cca7-4ab2-a751-7ccd17fd2a27","Type":"ContainerStarted","Data":"b179236115fae80f43e39cdaa1fd19c0d19865122069242e0e60bf682b561d05"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.637348 4849 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-5jm84" event={"ID":"fcb38731-cca7-4ab2-a751-7ccd17fd2a27","Type":"ContainerStarted","Data":"122fb374a7f037ffca4b5d114fe879827722ff6541b3a623c5acbf60662719c2"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.638885 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" event={"ID":"ad1ad7d0-2483-4332-8d18-79fc262fe94b","Type":"ContainerStarted","Data":"5b8b72c938959953d87ff8451807460f9bd31f90d3dd4b2213695086301e27d7"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.638912 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" event={"ID":"ad1ad7d0-2483-4332-8d18-79fc262fe94b","Type":"ContainerStarted","Data":"9fc8e93ea855f64bd6754e2a925e2258d6fb69df2ecb1de61379263dce40807a"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.639236 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.640744 4849 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-cgdl8 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.28:5443/healthz\": dial tcp 10.217.0.28:5443: connect: connection refused" start-of-body= Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.640777 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" podUID="ad1ad7d0-2483-4332-8d18-79fc262fe94b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.28:5443/healthz\": dial tcp 10.217.0.28:5443: connect: connection refused" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.642310 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" event={"ID":"c4daca20-bfbb-4929-8a37-5f75500b1afd","Type":"ContainerStarted","Data":"c7f25fdca6bbc018f502e339bb5d6dbc9c8a336c9db2b1cc1ff7233e17dddd31"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.642337 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" event={"ID":"c4daca20-bfbb-4929-8a37-5f75500b1afd","Type":"ContainerStarted","Data":"41f227e5beac689c56e3f171655f36ddf5012839c04ea83595545e4c60a7ead6"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.642347 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" event={"ID":"c4daca20-bfbb-4929-8a37-5f75500b1afd","Type":"ContainerStarted","Data":"04d4322486d152faeb807c29c2719362d364ed57bd8c083a65ab175145d1df44"} Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.642359 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.675804 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.676004 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" podStartSLOduration=118.675995272 podStartE2EDuration="1m58.675995272s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.654918938 +0000 UTC m=+137.116766721" watchObservedRunningTime="2025-12-03 12:23:10.675995272 +0000 UTC m=+137.137843054" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.691111 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-zjlhp" podStartSLOduration=118.691097899 podStartE2EDuration="1m58.691097899s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.675860989 +0000 UTC m=+137.137708772" watchObservedRunningTime="2025-12-03 12:23:10.691097899 +0000 UTC m=+137.152945682" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.691455 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gt4qk" podStartSLOduration=118.691448919 podStartE2EDuration="1m58.691448919s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.689958676 +0000 UTC m=+137.151806459" watchObservedRunningTime="2025-12-03 12:23:10.691448919 +0000 UTC m=+137.153296702" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.695084 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:10 crc kubenswrapper[4849]: E1203 12:23:10.696614 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:11.196601981 +0000 UTC m=+137.658449765 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.739309 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" podStartSLOduration=119.739294423 podStartE2EDuration="1m59.739294423s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.719345661 +0000 UTC m=+137.181193445" watchObservedRunningTime="2025-12-03 12:23:10.739294423 +0000 UTC m=+137.201142207" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.761981 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-jcr69" podStartSLOduration=118.761962952 podStartE2EDuration="1m58.761962952s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.738911774 +0000 UTC m=+137.200759556" watchObservedRunningTime="2025-12-03 12:23:10.761962952 +0000 UTC m=+137.223810735" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.782884 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-w7xvk" podStartSLOduration=6.782869395 podStartE2EDuration="6.782869395s" podCreationTimestamp="2025-12-03 12:23:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.763785209 +0000 UTC m=+137.225632992" watchObservedRunningTime="2025-12-03 12:23:10.782869395 +0000 UTC m=+137.244717178" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.783694 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" podStartSLOduration=119.783689147 podStartE2EDuration="1m59.783689147s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.781541037 +0000 UTC m=+137.243388810" watchObservedRunningTime="2025-12-03 12:23:10.783689147 +0000 UTC m=+137.245536921" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.797452 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:10 crc kubenswrapper[4849]: E1203 12:23:10.797825 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-03 12:23:11.297814086 +0000 UTC m=+137.759661869 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.798809 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvg2x" podStartSLOduration=118.798795471 podStartE2EDuration="1m58.798795471s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.796905597 +0000 UTC m=+137.258753380" watchObservedRunningTime="2025-12-03 12:23:10.798795471 +0000 UTC m=+137.260643255" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.816050 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-gct6s" podStartSLOduration=6.816037833 podStartE2EDuration="6.816037833s" podCreationTimestamp="2025-12-03 12:23:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.814963222 +0000 UTC m=+137.276811005" watchObservedRunningTime="2025-12-03 12:23:10.816037833 +0000 UTC m=+137.277885617" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.855739 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nvzf6" podStartSLOduration=118.855721947 podStartE2EDuration="1m58.855721947s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.853323566 +0000 UTC m=+137.315171349" watchObservedRunningTime="2025-12-03 12:23:10.855721947 +0000 UTC m=+137.317569730" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.881954 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" podStartSLOduration=119.881938498 podStartE2EDuration="1m59.881938498s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.8775633 +0000 UTC m=+137.339411083" watchObservedRunningTime="2025-12-03 12:23:10.881938498 +0000 UTC m=+137.343786281" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.890520 4849 patch_prober.go:28] interesting pod/router-default-5444994796-zdq4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:23:10 crc kubenswrapper[4849]: [-]has-synced failed: reason withheld Dec 03 12:23:10 crc kubenswrapper[4849]: [+]process-running ok Dec 03 12:23:10 crc kubenswrapper[4849]: healthz check failed Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.890575 4849 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdq4x" podUID="7eca320e-67e3-4f03-92ff-8a79363ca7ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.896152 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qxlsn" podStartSLOduration=118.896122749 podStartE2EDuration="1m58.896122749s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.895896813 +0000 UTC m=+137.357744596" watchObservedRunningTime="2025-12-03 12:23:10.896122749 +0000 UTC m=+137.357970532" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.898532 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:10 crc kubenswrapper[4849]: E1203 12:23:10.898729 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:11.398711488 +0000 UTC m=+137.860559271 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.898823 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:10 crc kubenswrapper[4849]: E1203 12:23:10.899170 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:11.399162425 +0000 UTC m=+137.861010209 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.917876 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4cms" podStartSLOduration=118.917861368 podStartE2EDuration="1m58.917861368s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.917863592 +0000 UTC m=+137.379711375" watchObservedRunningTime="2025-12-03 12:23:10.917861368 +0000 UTC m=+137.379709150" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.943303 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" podStartSLOduration=118.943290828 podStartE2EDuration="1m58.943290828s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.941332436 +0000 UTC m=+137.403180218" watchObservedRunningTime="2025-12-03 12:23:10.943290828 +0000 UTC m=+137.405138611" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.973361 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6rcps"] Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.974227 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.977905 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.986308 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6rcps"] Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.991931 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-5jm84" podStartSLOduration=119.991917613 podStartE2EDuration="1m59.991917613s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:10.990435305 +0000 UTC m=+137.452283088" watchObservedRunningTime="2025-12-03 12:23:10.991917613 +0000 UTC m=+137.453765396" Dec 03 12:23:10 crc kubenswrapper[4849]: I1203 12:23:10.999463 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:10 crc kubenswrapper[4849]: E1203 12:23:10.999892 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:11.499878477 +0000 UTC m=+137.961726259 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.010767 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" podStartSLOduration=119.0107468 podStartE2EDuration="1m59.0107468s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:11.006495593 +0000 UTC m=+137.468343376" watchObservedRunningTime="2025-12-03 12:23:11.0107468 +0000 UTC m=+137.472594572" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.033453 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xpg5z" podStartSLOduration=119.033439183 podStartE2EDuration="1m59.033439183s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:11.032978876 +0000 UTC m=+137.494826660" watchObservedRunningTime="2025-12-03 12:23:11.033439183 +0000 UTC m=+137.495286966" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.100750 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.100819 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4454830a-59f9-4ece-8e5c-554b725015ec-utilities\") pod \"certified-operators-6rcps\" (UID: \"4454830a-59f9-4ece-8e5c-554b725015ec\") " pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.100844 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4454830a-59f9-4ece-8e5c-554b725015ec-catalog-content\") pod \"certified-operators-6rcps\" (UID: \"4454830a-59f9-4ece-8e5c-554b725015ec\") " pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.100910 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52lvf\" (UniqueName: \"kubernetes.io/projected/4454830a-59f9-4ece-8e5c-554b725015ec-kube-api-access-52lvf\") pod \"certified-operators-6rcps\" (UID: \"4454830a-59f9-4ece-8e5c-554b725015ec\") " pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:11 crc kubenswrapper[4849]: E1203 12:23:11.101072 4849 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:11.601057138 +0000 UTC m=+138.062904921 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.174962 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cwdxv"] Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.175850 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.178908 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.189360 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cwdxv"] Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.201710 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.201958 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52lvf\" (UniqueName: \"kubernetes.io/projected/4454830a-59f9-4ece-8e5c-554b725015ec-kube-api-access-52lvf\") pod \"certified-operators-6rcps\" (UID: \"4454830a-59f9-4ece-8e5c-554b725015ec\") " pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:11 crc kubenswrapper[4849]: E1203 12:23:11.202075 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:11.702058045 +0000 UTC m=+138.163905827 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.202210 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4454830a-59f9-4ece-8e5c-554b725015ec-utilities\") pod \"certified-operators-6rcps\" (UID: \"4454830a-59f9-4ece-8e5c-554b725015ec\") " pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.202297 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4454830a-59f9-4ece-8e5c-554b725015ec-catalog-content\") pod \"certified-operators-6rcps\" (UID: \"4454830a-59f9-4ece-8e5c-554b725015ec\") " pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.202824 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4454830a-59f9-4ece-8e5c-554b725015ec-utilities\") pod \"certified-operators-6rcps\" (UID: \"4454830a-59f9-4ece-8e5c-554b725015ec\") " pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.203008 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4454830a-59f9-4ece-8e5c-554b725015ec-catalog-content\") pod \"certified-operators-6rcps\" (UID: \"4454830a-59f9-4ece-8e5c-554b725015ec\") " pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.227391 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52lvf\" (UniqueName: \"kubernetes.io/projected/4454830a-59f9-4ece-8e5c-554b725015ec-kube-api-access-52lvf\") pod \"certified-operators-6rcps\" (UID: \"4454830a-59f9-4ece-8e5c-554b725015ec\") " pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.294070 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.303687 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.303765 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c329326-860d-4eed-855d-e7811ee41819-utilities\") pod \"community-operators-cwdxv\" (UID: \"3c329326-860d-4eed-855d-e7811ee41819\") " pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.303804 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c329326-860d-4eed-855d-e7811ee41819-catalog-content\") pod \"community-operators-cwdxv\" (UID: \"3c329326-860d-4eed-855d-e7811ee41819\") " pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.303824 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfcd9\" (UniqueName: \"kubernetes.io/projected/3c329326-860d-4eed-855d-e7811ee41819-kube-api-access-zfcd9\") pod \"community-operators-cwdxv\" (UID: \"3c329326-860d-4eed-855d-e7811ee41819\") " pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:11 crc kubenswrapper[4849]: E1203 12:23:11.304022 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:11.804008277 +0000 UTC m=+138.265856060 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.372359 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jbm2s"] Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.373426 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.381533 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jbm2s"] Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.404782 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:11 crc kubenswrapper[4849]: E1203 12:23:11.404927 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:11.904903145 +0000 UTC m=+138.366750927 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.405007 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c329326-860d-4eed-855d-e7811ee41819-catalog-content\") pod \"community-operators-cwdxv\" (UID: \"3c329326-860d-4eed-855d-e7811ee41819\") " pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.405036 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfcd9\" (UniqueName: \"kubernetes.io/projected/3c329326-860d-4eed-855d-e7811ee41819-kube-api-access-zfcd9\") pod \"community-operators-cwdxv\" (UID: \"3c329326-860d-4eed-855d-e7811ee41819\") " pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.405125 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.405240 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c329326-860d-4eed-855d-e7811ee41819-utilities\") pod \"community-operators-cwdxv\" (UID: \"3c329326-860d-4eed-855d-e7811ee41819\") " pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:11 crc kubenswrapper[4849]: E1203 12:23:11.405702 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:11.90567705 +0000 UTC m=+138.367524833 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.405783 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c329326-860d-4eed-855d-e7811ee41819-catalog-content\") pod \"community-operators-cwdxv\" (UID: \"3c329326-860d-4eed-855d-e7811ee41819\") " pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.405818 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c329326-860d-4eed-855d-e7811ee41819-utilities\") pod \"community-operators-cwdxv\" (UID: \"3c329326-860d-4eed-855d-e7811ee41819\") " pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.423347 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfcd9\" (UniqueName: \"kubernetes.io/projected/3c329326-860d-4eed-855d-e7811ee41819-kube-api-access-zfcd9\") pod \"community-operators-cwdxv\" (UID: \"3c329326-860d-4eed-855d-e7811ee41819\") " pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.455166 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.455210 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.471188 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.499825 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.508055 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.508218 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/195616d8-386c-4b49-80b8-6a8f1dfd87a6-utilities\") pod \"certified-operators-jbm2s\" (UID: \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\") " pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.508248 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvpkk\" (UniqueName: \"kubernetes.io/projected/195616d8-386c-4b49-80b8-6a8f1dfd87a6-kube-api-access-lvpkk\") pod \"certified-operators-jbm2s\" (UID: \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\") " pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.508279 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/195616d8-386c-4b49-80b8-6a8f1dfd87a6-catalog-content\") pod \"certified-operators-jbm2s\" (UID: \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\") " pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:11 crc kubenswrapper[4849]: E1203 12:23:11.508808 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:12.00879562 +0000 UTC m=+138.470643403 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.566729 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hlk48"] Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.567560 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.585013 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6rcps"] Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.609191 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/195616d8-386c-4b49-80b8-6a8f1dfd87a6-utilities\") pod \"certified-operators-jbm2s\" (UID: \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\") " pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.609229 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvpkk\" (UniqueName: \"kubernetes.io/projected/195616d8-386c-4b49-80b8-6a8f1dfd87a6-kube-api-access-lvpkk\") pod \"certified-operators-jbm2s\" (UID: \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\") " pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.609254 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/195616d8-386c-4b49-80b8-6a8f1dfd87a6-catalog-content\") pod \"certified-operators-jbm2s\" (UID: \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\") " pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.609286 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:11 crc kubenswrapper[4849]: E1203 12:23:11.609533 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:12.109521419 +0000 UTC m=+138.571369202 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.609872 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/195616d8-386c-4b49-80b8-6a8f1dfd87a6-utilities\") pod \"certified-operators-jbm2s\" (UID: \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\") " pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.610078 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/195616d8-386c-4b49-80b8-6a8f1dfd87a6-catalog-content\") pod \"certified-operators-jbm2s\" (UID: \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\") " pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.632555 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvpkk\" (UniqueName: \"kubernetes.io/projected/195616d8-386c-4b49-80b8-6a8f1dfd87a6-kube-api-access-lvpkk\") pod \"certified-operators-jbm2s\" (UID: \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\") " pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.632869 4849 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.639410 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hlk48"] Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.685226 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.685506 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6rcps" event={"ID":"4454830a-59f9-4ece-8e5c-554b725015ec","Type":"ContainerStarted","Data":"c6340a35976ec2e2d4e2eb4629762a66ce0c6f6d7f76179504c505b65b84f4ad"} Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.707863 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" event={"ID":"45259dc8-64c2-4684-9669-e8b743a14857","Type":"ContainerStarted","Data":"c53c682c845ddb626e579e6b4b96ee0f79c5bb9394b0e7d6267935f24f087422"} Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.707898 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" event={"ID":"45259dc8-64c2-4684-9669-e8b743a14857","Type":"ContainerStarted","Data":"34cd59dfaa50013dd26eb5c6e055cb1583597abcf0c9e59b19d2e7d5378980d8"} Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.707910 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" event={"ID":"45259dc8-64c2-4684-9669-e8b743a14857","Type":"ContainerStarted","Data":"8cd90b645d49ecfb7931effa323f4fe1443d8d487356a14434ba4589750a2447"} Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.714633 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.714987 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/314e4660-f3be-424e-8635-f4e85fca56be-catalog-content\") pod \"community-operators-hlk48\" (UID: \"314e4660-f3be-424e-8635-f4e85fca56be\") " pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.715080 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/314e4660-f3be-424e-8635-f4e85fca56be-utilities\") pod \"community-operators-hlk48\" (UID: \"314e4660-f3be-424e-8635-f4e85fca56be\") " pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.715139 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97rhs\" (UniqueName: \"kubernetes.io/projected/314e4660-f3be-424e-8635-f4e85fca56be-kube-api-access-97rhs\") pod \"community-operators-hlk48\" (UID: \"314e4660-f3be-424e-8635-f4e85fca56be\") " pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:11 crc kubenswrapper[4849]: E1203 12:23:11.715189 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:12.215164237 +0000 UTC m=+138.677012020 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.715372 4849 patch_prober.go:28] interesting pod/downloads-7954f5f757-jcr69 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.715421 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-jcr69" podUID="f6ce1f54-c500-4887-b9c7-cd89893a62ee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.722421 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-cnzhf" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.727191 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.735967 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.739833 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cgdl8" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.827296 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/314e4660-f3be-424e-8635-f4e85fca56be-utilities\") pod \"community-operators-hlk48\" (UID: \"314e4660-f3be-424e-8635-f4e85fca56be\") " pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.827523 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97rhs\" (UniqueName: \"kubernetes.io/projected/314e4660-f3be-424e-8635-f4e85fca56be-kube-api-access-97rhs\") pod \"community-operators-hlk48\" (UID: \"314e4660-f3be-424e-8635-f4e85fca56be\") " pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.827878 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.827898 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/314e4660-f3be-424e-8635-f4e85fca56be-catalog-content\") pod \"community-operators-hlk48\" (UID: 
\"314e4660-f3be-424e-8635-f4e85fca56be\") " pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.831420 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/314e4660-f3be-424e-8635-f4e85fca56be-utilities\") pod \"community-operators-hlk48\" (UID: \"314e4660-f3be-424e-8635-f4e85fca56be\") " pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:11 crc kubenswrapper[4849]: E1203 12:23:11.835106 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:12.335092414 +0000 UTC m=+138.796940197 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.835105 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/314e4660-f3be-424e-8635-f4e85fca56be-catalog-content\") pod \"community-operators-hlk48\" (UID: \"314e4660-f3be-424e-8635-f4e85fca56be\") " pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.877376 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97rhs\" (UniqueName: \"kubernetes.io/projected/314e4660-f3be-424e-8635-f4e85fca56be-kube-api-access-97rhs\") pod \"community-operators-hlk48\" (UID: \"314e4660-f3be-424e-8635-f4e85fca56be\") " pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.878967 4849 patch_prober.go:28] interesting pod/router-default-5444994796-zdq4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:23:11 crc kubenswrapper[4849]: [-]has-synced failed: reason withheld Dec 03 12:23:11 crc kubenswrapper[4849]: [+]process-running ok Dec 03 12:23:11 crc kubenswrapper[4849]: healthz check failed Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.879006 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdq4x" podUID="7eca320e-67e3-4f03-92ff-8a79363ca7ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.907248 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.929198 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:11 crc kubenswrapper[4849]: E1203 12:23:11.929439 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 12:23:12.429419228 +0000 UTC m=+138.891267011 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:11 crc kubenswrapper[4849]: I1203 12:23:11.973776 4849 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-03T12:23:11.633093647Z","Handler":null,"Name":""} Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.011622 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.011696 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.019207 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cwdxv"] Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.034279 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:12 crc kubenswrapper[4849]: E1203 12:23:12.034618 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 12:23:12.534606379 +0000 UTC m=+138.996454152 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8lbfv" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.104530 4849 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.104586 4849 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.137147 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.147170 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.239210 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.245519 4849 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.245553 4849 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.267703 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8lbfv\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.272356 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hlk48"] Dec 03 12:23:12 crc kubenswrapper[4849]: W1203 12:23:12.290886 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod314e4660_f3be_424e_8635_f4e85fca56be.slice/crio-a165e79ffb17450c251d42bb977b9feb225fc346f8628e3a52bbadec99917dd1 WatchSource:0}: Error finding container a165e79ffb17450c251d42bb977b9feb225fc346f8628e3a52bbadec99917dd1: Status 404 returned error can't find the container with id a165e79ffb17450c251d42bb977b9feb225fc346f8628e3a52bbadec99917dd1 Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.304700 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jbm2s"] Dec 03 12:23:12 crc kubenswrapper[4849]: W1203 12:23:12.313017 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod195616d8_386c_4b49_80b8_6a8f1dfd87a6.slice/crio-dce64a543bdf752432b0bb20c32d5ddf46d65dbaf6a89b2cd01245f19b50bf4e WatchSource:0}: Error finding container dce64a543bdf752432b0bb20c32d5ddf46d65dbaf6a89b2cd01245f19b50bf4e: Status 404 returned error can't find the container with id dce64a543bdf752432b0bb20c32d5ddf46d65dbaf6a89b2cd01245f19b50bf4e Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.476811 4849 patch_prober.go:28] interesting pod/apiserver-76f77b778f-5jm84 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 03 12:23:12 crc kubenswrapper[4849]: [+]log ok Dec 03 12:23:12 crc kubenswrapper[4849]: [+]etcd ok Dec 03 12:23:12 crc kubenswrapper[4849]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 03 12:23:12 crc kubenswrapper[4849]: [+]poststarthook/generic-apiserver-start-informers ok Dec 03 12:23:12 crc kubenswrapper[4849]: [+]poststarthook/max-in-flight-filter ok Dec 03 12:23:12 crc kubenswrapper[4849]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 03 12:23:12 crc kubenswrapper[4849]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 03 12:23:12 crc kubenswrapper[4849]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 03 12:23:12 crc kubenswrapper[4849]: 
[-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 03 12:23:12 crc kubenswrapper[4849]: [+]poststarthook/project.openshift.io-projectcache ok Dec 03 12:23:12 crc kubenswrapper[4849]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 03 12:23:12 crc kubenswrapper[4849]: [+]poststarthook/openshift.io-startinformers ok Dec 03 12:23:12 crc kubenswrapper[4849]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 03 12:23:12 crc kubenswrapper[4849]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 03 12:23:12 crc kubenswrapper[4849]: livez check failed Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.477100 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-5jm84" podUID="fcb38731-cca7-4ab2-a751-7ccd17fd2a27" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.513823 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.661768 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8lbfv"] Dec 03 12:23:12 crc kubenswrapper[4849]: W1203 12:23:12.670755 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc24803cd_8c4d_4ff4_9b85_441f955bd3f3.slice/crio-46ecc0526d96760062231cb32c86e3dbb33bff61462b577dc162d196617883d6 WatchSource:0}: Error finding container 46ecc0526d96760062231cb32c86e3dbb33bff61462b577dc162d196617883d6: Status 404 returned error can't find the container with id 46ecc0526d96760062231cb32c86e3dbb33bff61462b577dc162d196617883d6 Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.718125 4849 generic.go:334] "Generic (PLEG): container finished" podID="314e4660-f3be-424e-8635-f4e85fca56be" containerID="6af6eff0cfe3bd3433b6750d39bb776cfa51600d31d97799f3c5675d9f77b8fe" exitCode=0 Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.718232 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlk48" event={"ID":"314e4660-f3be-424e-8635-f4e85fca56be","Type":"ContainerDied","Data":"6af6eff0cfe3bd3433b6750d39bb776cfa51600d31d97799f3c5675d9f77b8fe"} Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.718269 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlk48" event={"ID":"314e4660-f3be-424e-8635-f4e85fca56be","Type":"ContainerStarted","Data":"a165e79ffb17450c251d42bb977b9feb225fc346f8628e3a52bbadec99917dd1"} Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.719250 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" event={"ID":"c24803cd-8c4d-4ff4-9b85-441f955bd3f3","Type":"ContainerStarted","Data":"46ecc0526d96760062231cb32c86e3dbb33bff61462b577dc162d196617883d6"} Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.719407 4849 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.721022 4849 generic.go:334] "Generic (PLEG): container finished" podID="195616d8-386c-4b49-80b8-6a8f1dfd87a6" containerID="5a4217d62600dd7a2e79ca76b330f96b5b1e15acd31309386a3bfc41a3c86113" exitCode=0 Dec 03 12:23:12 crc 
kubenswrapper[4849]: I1203 12:23:12.721068 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbm2s" event={"ID":"195616d8-386c-4b49-80b8-6a8f1dfd87a6","Type":"ContainerDied","Data":"5a4217d62600dd7a2e79ca76b330f96b5b1e15acd31309386a3bfc41a3c86113"} Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.721083 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbm2s" event={"ID":"195616d8-386c-4b49-80b8-6a8f1dfd87a6","Type":"ContainerStarted","Data":"dce64a543bdf752432b0bb20c32d5ddf46d65dbaf6a89b2cd01245f19b50bf4e"} Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.723517 4849 generic.go:334] "Generic (PLEG): container finished" podID="3c329326-860d-4eed-855d-e7811ee41819" containerID="91709dcec8acc50195b0ab095ada00e680810fb9797cb1b5cb82c183472a17fe" exitCode=0 Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.723564 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwdxv" event={"ID":"3c329326-860d-4eed-855d-e7811ee41819","Type":"ContainerDied","Data":"91709dcec8acc50195b0ab095ada00e680810fb9797cb1b5cb82c183472a17fe"} Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.723579 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwdxv" event={"ID":"3c329326-860d-4eed-855d-e7811ee41819","Type":"ContainerStarted","Data":"3ebdc20a1c69ddf42e51f27476064c442a187ff3e2bf207ec25f844ca5d239aa"} Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.729174 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" event={"ID":"45259dc8-64c2-4684-9669-e8b743a14857","Type":"ContainerStarted","Data":"b4ab958ff2ba506f623302fe64a2dbe9175003beca65f071aaed26cd1008cc70"} Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.731019 4849 generic.go:334] "Generic (PLEG): container finished" podID="4454830a-59f9-4ece-8e5c-554b725015ec" containerID="df55a181b56e824dd4de5374857214f29403c07266d972a1d953fa39340f2e7e" exitCode=0 Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.732053 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6rcps" event={"ID":"4454830a-59f9-4ece-8e5c-554b725015ec","Type":"ContainerDied","Data":"df55a181b56e824dd4de5374857214f29403c07266d972a1d953fa39340f2e7e"} Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.734237 4849 patch_prober.go:28] interesting pod/downloads-7954f5f757-jcr69 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.734263 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-jcr69" podUID="f6ce1f54-c500-4887-b9c7-cd89893a62ee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.775986 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-cfwmz" podStartSLOduration=8.775960923 podStartE2EDuration="8.775960923s" podCreationTimestamp="2025-12-03 12:23:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-03 12:23:12.773655476 +0000 UTC m=+139.235503259" watchObservedRunningTime="2025-12-03 12:23:12.775960923 +0000 UTC m=+139.237808706" Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.878225 4849 patch_prober.go:28] interesting pod/router-default-5444994796-zdq4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:23:12 crc kubenswrapper[4849]: [-]has-synced failed: reason withheld Dec 03 12:23:12 crc kubenswrapper[4849]: [+]process-running ok Dec 03 12:23:12 crc kubenswrapper[4849]: healthz check failed Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.878276 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdq4x" podUID="7eca320e-67e3-4f03-92ff-8a79363ca7ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.962960 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-h5j2b"] Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.963929 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.965711 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 12:23:12 crc kubenswrapper[4849]: I1203 12:23:12.971286 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5j2b"] Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.012699 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-v9bf5" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.052508 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fbb5c1c-5b96-4563-be16-83f73dece6aa-utilities\") pod \"redhat-marketplace-h5j2b\" (UID: \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\") " pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.052539 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fbb5c1c-5b96-4563-be16-83f73dece6aa-catalog-content\") pod \"redhat-marketplace-h5j2b\" (UID: \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\") " pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.052589 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spcbn\" (UniqueName: \"kubernetes.io/projected/2fbb5c1c-5b96-4563-be16-83f73dece6aa-kube-api-access-spcbn\") pod \"redhat-marketplace-h5j2b\" (UID: \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\") " pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.154254 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spcbn\" (UniqueName: \"kubernetes.io/projected/2fbb5c1c-5b96-4563-be16-83f73dece6aa-kube-api-access-spcbn\") pod \"redhat-marketplace-h5j2b\" (UID: \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\") " 
pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.154421 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fbb5c1c-5b96-4563-be16-83f73dece6aa-utilities\") pod \"redhat-marketplace-h5j2b\" (UID: \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\") " pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.154438 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fbb5c1c-5b96-4563-be16-83f73dece6aa-catalog-content\") pod \"redhat-marketplace-h5j2b\" (UID: \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\") " pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.154895 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fbb5c1c-5b96-4563-be16-83f73dece6aa-catalog-content\") pod \"redhat-marketplace-h5j2b\" (UID: \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\") " pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.154933 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fbb5c1c-5b96-4563-be16-83f73dece6aa-utilities\") pod \"redhat-marketplace-h5j2b\" (UID: \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\") " pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.170547 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spcbn\" (UniqueName: \"kubernetes.io/projected/2fbb5c1c-5b96-4563-be16-83f73dece6aa-kube-api-access-spcbn\") pod \"redhat-marketplace-h5j2b\" (UID: \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\") " pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.278180 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.363303 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k4pjk"] Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.364169 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.385589 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k4pjk"] Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.439419 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5j2b"] Dec 03 12:23:13 crc kubenswrapper[4849]: W1203 12:23:13.449465 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fbb5c1c_5b96_4563_be16_83f73dece6aa.slice/crio-ecec2f1d72adba43d4ce6f4bfb623205eb5983d9acfe5ba72dfe75c539e91fb6 WatchSource:0}: Error finding container ecec2f1d72adba43d4ce6f4bfb623205eb5983d9acfe5ba72dfe75c539e91fb6: Status 404 returned error can't find the container with id ecec2f1d72adba43d4ce6f4bfb623205eb5983d9acfe5ba72dfe75c539e91fb6 Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.458503 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c85938a-071b-468a-a7d6-7a106913311b-utilities\") pod \"redhat-marketplace-k4pjk\" (UID: \"1c85938a-071b-468a-a7d6-7a106913311b\") " pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.458579 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c85938a-071b-468a-a7d6-7a106913311b-catalog-content\") pod \"redhat-marketplace-k4pjk\" (UID: \"1c85938a-071b-468a-a7d6-7a106913311b\") " pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.458595 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dfbh\" (UniqueName: \"kubernetes.io/projected/1c85938a-071b-468a-a7d6-7a106913311b-kube-api-access-5dfbh\") pod \"redhat-marketplace-k4pjk\" (UID: \"1c85938a-071b-468a-a7d6-7a106913311b\") " pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.559550 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c85938a-071b-468a-a7d6-7a106913311b-utilities\") pod \"redhat-marketplace-k4pjk\" (UID: \"1c85938a-071b-468a-a7d6-7a106913311b\") " pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.559627 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c85938a-071b-468a-a7d6-7a106913311b-catalog-content\") pod \"redhat-marketplace-k4pjk\" (UID: \"1c85938a-071b-468a-a7d6-7a106913311b\") " pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.559663 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dfbh\" (UniqueName: \"kubernetes.io/projected/1c85938a-071b-468a-a7d6-7a106913311b-kube-api-access-5dfbh\") pod \"redhat-marketplace-k4pjk\" (UID: \"1c85938a-071b-468a-a7d6-7a106913311b\") " pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.560295 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/1c85938a-071b-468a-a7d6-7a106913311b-utilities\") pod \"redhat-marketplace-k4pjk\" (UID: \"1c85938a-071b-468a-a7d6-7a106913311b\") " pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.560512 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c85938a-071b-468a-a7d6-7a106913311b-catalog-content\") pod \"redhat-marketplace-k4pjk\" (UID: \"1c85938a-071b-468a-a7d6-7a106913311b\") " pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.583584 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dfbh\" (UniqueName: \"kubernetes.io/projected/1c85938a-071b-468a-a7d6-7a106913311b-kube-api-access-5dfbh\") pod \"redhat-marketplace-k4pjk\" (UID: \"1c85938a-071b-468a-a7d6-7a106913311b\") " pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.681360 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.740730 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" event={"ID":"c24803cd-8c4d-4ff4-9b85-441f955bd3f3","Type":"ContainerStarted","Data":"23a0eb6ca1343e2c30ad6eceadee12b13db4d77a0d9b8d849770be3690e9abc7"} Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.740886 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.745078 4849 generic.go:334] "Generic (PLEG): container finished" podID="40fb2e03-cf2f-4504-9168-9e487f5799ea" containerID="40cf6ef1dfe441e04874984081bf1dc19ddaceef0f66d6433ebeb6f4a843a4e3" exitCode=0 Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.745158 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" event={"ID":"40fb2e03-cf2f-4504-9168-9e487f5799ea","Type":"ContainerDied","Data":"40cf6ef1dfe441e04874984081bf1dc19ddaceef0f66d6433ebeb6f4a843a4e3"} Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.747444 4849 generic.go:334] "Generic (PLEG): container finished" podID="2fbb5c1c-5b96-4563-be16-83f73dece6aa" containerID="79de5692f0aa48361ca4e218bf20713bd6a6d932babb24c5f688e0780ae9e53b" exitCode=0 Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.748140 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5j2b" event={"ID":"2fbb5c1c-5b96-4563-be16-83f73dece6aa","Type":"ContainerDied","Data":"79de5692f0aa48361ca4e218bf20713bd6a6d932babb24c5f688e0780ae9e53b"} Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.748184 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5j2b" event={"ID":"2fbb5c1c-5b96-4563-be16-83f73dece6aa","Type":"ContainerStarted","Data":"ecec2f1d72adba43d4ce6f4bfb623205eb5983d9acfe5ba72dfe75c539e91fb6"} Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.756190 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" podStartSLOduration=121.756178034 podStartE2EDuration="2m1.756178034s" podCreationTimestamp="2025-12-03 12:21:12 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:13.75480973 +0000 UTC m=+140.216657513" watchObservedRunningTime="2025-12-03 12:23:13.756178034 +0000 UTC m=+140.218025816" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.871090 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.881691 4849 patch_prober.go:28] interesting pod/router-default-5444994796-zdq4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:23:13 crc kubenswrapper[4849]: [-]has-synced failed: reason withheld Dec 03 12:23:13 crc kubenswrapper[4849]: [+]process-running ok Dec 03 12:23:13 crc kubenswrapper[4849]: healthz check failed Dec 03 12:23:13 crc kubenswrapper[4849]: I1203 12:23:13.881778 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdq4x" podUID="7eca320e-67e3-4f03-92ff-8a79363ca7ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.110011 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k4pjk"] Dec 03 12:23:14 crc kubenswrapper[4849]: W1203 12:23:14.121209 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c85938a_071b_468a_a7d6_7a106913311b.slice/crio-c6b3874be43327808c1d9ae5387d2035d55979de60f6bef7c04cafcd1290259b WatchSource:0}: Error finding container c6b3874be43327808c1d9ae5387d2035d55979de60f6bef7c04cafcd1290259b: Status 404 returned error can't find the container with id c6b3874be43327808c1d9ae5387d2035d55979de60f6bef7c04cafcd1290259b Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.367303 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jvk79"] Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.369099 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.375084 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jvk79"] Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.376686 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.477401 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-catalog-content\") pod \"redhat-operators-jvk79\" (UID: \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\") " pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.477772 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-utilities\") pod \"redhat-operators-jvk79\" (UID: \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\") " pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.478103 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddjjr\" (UniqueName: \"kubernetes.io/projected/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-kube-api-access-ddjjr\") pod \"redhat-operators-jvk79\" (UID: \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\") " pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.579149 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-catalog-content\") pod \"redhat-operators-jvk79\" (UID: \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\") " pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.579187 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-utilities\") pod \"redhat-operators-jvk79\" (UID: \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\") " pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.579250 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddjjr\" (UniqueName: \"kubernetes.io/projected/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-kube-api-access-ddjjr\") pod \"redhat-operators-jvk79\" (UID: \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\") " pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.579776 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-catalog-content\") pod \"redhat-operators-jvk79\" (UID: \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\") " pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.579845 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-utilities\") pod \"redhat-operators-jvk79\" (UID: \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\") " 
pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.597988 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddjjr\" (UniqueName: \"kubernetes.io/projected/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-kube-api-access-ddjjr\") pod \"redhat-operators-jvk79\" (UID: \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\") " pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.691125 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.772165 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xdhfk"] Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.774285 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.777048 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xdhfk"] Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.810597 4849 generic.go:334] "Generic (PLEG): container finished" podID="1c85938a-071b-468a-a7d6-7a106913311b" containerID="b0013266c223be31ee8c4106b60842b4ef64597373a735655948e99c9b56a8cc" exitCode=0 Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.811040 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k4pjk" event={"ID":"1c85938a-071b-468a-a7d6-7a106913311b","Type":"ContainerDied","Data":"b0013266c223be31ee8c4106b60842b4ef64597373a735655948e99c9b56a8cc"} Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.811076 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k4pjk" event={"ID":"1c85938a-071b-468a-a7d6-7a106913311b","Type":"ContainerStarted","Data":"c6b3874be43327808c1d9ae5387d2035d55979de60f6bef7c04cafcd1290259b"} Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.880925 4849 patch_prober.go:28] interesting pod/router-default-5444994796-zdq4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:23:14 crc kubenswrapper[4849]: [-]has-synced failed: reason withheld Dec 03 12:23:14 crc kubenswrapper[4849]: [+]process-running ok Dec 03 12:23:14 crc kubenswrapper[4849]: healthz check failed Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.880969 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdq4x" podUID="7eca320e-67e3-4f03-92ff-8a79363ca7ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.896551 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-utilities\") pod \"redhat-operators-xdhfk\" (UID: \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\") " pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.896751 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-299jg\" (UniqueName: 
\"kubernetes.io/projected/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-kube-api-access-299jg\") pod \"redhat-operators-xdhfk\" (UID: \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\") " pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.896793 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-catalog-content\") pod \"redhat-operators-xdhfk\" (UID: \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\") " pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.923702 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jvk79"] Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.932051 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.933299 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.937604 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.938084 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.940264 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.998079 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e444288e-49e3-4708-bd0b-9c70b5f09796-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e444288e-49e3-4708-bd0b-9c70b5f09796\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:23:14 crc kubenswrapper[4849]: I1203 12:23:14.998151 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-utilities\") pod \"redhat-operators-xdhfk\" (UID: \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\") " pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:14.999909 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-utilities\") pod \"redhat-operators-xdhfk\" (UID: \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\") " pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.003523 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-299jg\" (UniqueName: \"kubernetes.io/projected/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-kube-api-access-299jg\") pod \"redhat-operators-xdhfk\" (UID: \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\") " pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.003578 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/e444288e-49e3-4708-bd0b-9c70b5f09796-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e444288e-49e3-4708-bd0b-9c70b5f09796\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.003744 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-catalog-content\") pod \"redhat-operators-xdhfk\" (UID: \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\") " pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.005036 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-catalog-content\") pod \"redhat-operators-xdhfk\" (UID: \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\") " pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.042493 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-299jg\" (UniqueName: \"kubernetes.io/projected/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-kube-api-access-299jg\") pod \"redhat-operators-xdhfk\" (UID: \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\") " pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.054446 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.105376 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e444288e-49e3-4708-bd0b-9c70b5f09796-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e444288e-49e3-4708-bd0b-9c70b5f09796\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.105488 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e444288e-49e3-4708-bd0b-9c70b5f09796-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e444288e-49e3-4708-bd0b-9c70b5f09796\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.105600 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e444288e-49e3-4708-bd0b-9c70b5f09796-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e444288e-49e3-4708-bd0b-9c70b5f09796\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.112793 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.131352 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e444288e-49e3-4708-bd0b-9c70b5f09796-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e444288e-49e3-4708-bd0b-9c70b5f09796\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.193322 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 12:23:15 crc kubenswrapper[4849]: E1203 12:23:15.193630 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40fb2e03-cf2f-4504-9168-9e487f5799ea" containerName="collect-profiles" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.193729 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="40fb2e03-cf2f-4504-9168-9e487f5799ea" containerName="collect-profiles" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.193898 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="40fb2e03-cf2f-4504-9168-9e487f5799ea" containerName="collect-profiles" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.199631 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.202373 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.204562 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.205093 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.206350 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40fb2e03-cf2f-4504-9168-9e487f5799ea-config-volume\") pod \"40fb2e03-cf2f-4504-9168-9e487f5799ea\" (UID: \"40fb2e03-cf2f-4504-9168-9e487f5799ea\") " Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.206411 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7kzw\" (UniqueName: \"kubernetes.io/projected/40fb2e03-cf2f-4504-9168-9e487f5799ea-kube-api-access-h7kzw\") pod \"40fb2e03-cf2f-4504-9168-9e487f5799ea\" (UID: \"40fb2e03-cf2f-4504-9168-9e487f5799ea\") " Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.206538 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/40fb2e03-cf2f-4504-9168-9e487f5799ea-secret-volume\") pod \"40fb2e03-cf2f-4504-9168-9e487f5799ea\" (UID: \"40fb2e03-cf2f-4504-9168-9e487f5799ea\") " Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.207687 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40fb2e03-cf2f-4504-9168-9e487f5799ea-config-volume" (OuterVolumeSpecName: "config-volume") pod "40fb2e03-cf2f-4504-9168-9e487f5799ea" (UID: "40fb2e03-cf2f-4504-9168-9e487f5799ea"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.211442 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40fb2e03-cf2f-4504-9168-9e487f5799ea-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "40fb2e03-cf2f-4504-9168-9e487f5799ea" (UID: "40fb2e03-cf2f-4504-9168-9e487f5799ea"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.211468 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40fb2e03-cf2f-4504-9168-9e487f5799ea-kube-api-access-h7kzw" (OuterVolumeSpecName: "kube-api-access-h7kzw") pod "40fb2e03-cf2f-4504-9168-9e487f5799ea" (UID: "40fb2e03-cf2f-4504-9168-9e487f5799ea"). InnerVolumeSpecName "kube-api-access-h7kzw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.262479 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.307475 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2187ef92-2a54-4646-9c49-9fa970e8ae23-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2187ef92-2a54-4646-9c49-9fa970e8ae23\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.307768 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2187ef92-2a54-4646-9c49-9fa970e8ae23-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2187ef92-2a54-4646-9c49-9fa970e8ae23\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.307861 4849 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/40fb2e03-cf2f-4504-9168-9e487f5799ea-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.307874 4849 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40fb2e03-cf2f-4504-9168-9e487f5799ea-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.307884 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7kzw\" (UniqueName: \"kubernetes.io/projected/40fb2e03-cf2f-4504-9168-9e487f5799ea-kube-api-access-h7kzw\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.411123 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2187ef92-2a54-4646-9c49-9fa970e8ae23-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2187ef92-2a54-4646-9c49-9fa970e8ae23\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.411359 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2187ef92-2a54-4646-9c49-9fa970e8ae23-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2187ef92-2a54-4646-9c49-9fa970e8ae23\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" 
Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.411536 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2187ef92-2a54-4646-9c49-9fa970e8ae23-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2187ef92-2a54-4646-9c49-9fa970e8ae23\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.426910 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2187ef92-2a54-4646-9c49-9fa970e8ae23-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2187ef92-2a54-4646-9c49-9fa970e8ae23\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.451753 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 12:23:15 crc kubenswrapper[4849]: W1203 12:23:15.511225 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pode444288e_49e3_4708_bd0b_9c70b5f09796.slice/crio-8eb46b91182c492ea35bebb1654995e09f9e6fee9d43cef2eb541a0697a786bc WatchSource:0}: Error finding container 8eb46b91182c492ea35bebb1654995e09f9e6fee9d43cef2eb541a0697a786bc: Status 404 returned error can't find the container with id 8eb46b91182c492ea35bebb1654995e09f9e6fee9d43cef2eb541a0697a786bc Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.527285 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.573729 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xdhfk"] Dec 03 12:23:15 crc kubenswrapper[4849]: W1203 12:23:15.636033 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15583ad6_2adf_4151_aa5b_8a2f4ba0bb56.slice/crio-afa846409e87684df06ad8e23e6ecaa3a81ada28e38de07e41ba92f654331c35 WatchSource:0}: Error finding container afa846409e87684df06ad8e23e6ecaa3a81ada28e38de07e41ba92f654331c35: Status 404 returned error can't find the container with id afa846409e87684df06ad8e23e6ecaa3a81ada28e38de07e41ba92f654331c35 Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.820712 4849 generic.go:334] "Generic (PLEG): container finished" podID="b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" containerID="d7f92c642dd395195f01c25d4ebec7c1fafaa6c87584c28eaf3563b52314e91a" exitCode=0 Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.821044 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jvk79" event={"ID":"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60","Type":"ContainerDied","Data":"d7f92c642dd395195f01c25d4ebec7c1fafaa6c87584c28eaf3563b52314e91a"} Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.821075 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jvk79" event={"ID":"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60","Type":"ContainerStarted","Data":"fce386904306390b56ec223e5eab5635331b7d7f4b17eee2c0ec65cd7b573fdb"} Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.827376 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" 
event={"ID":"e444288e-49e3-4708-bd0b-9c70b5f09796","Type":"ContainerStarted","Data":"8eb46b91182c492ea35bebb1654995e09f9e6fee9d43cef2eb541a0697a786bc"} Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.832362 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.832360 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412735-hgspx" event={"ID":"40fb2e03-cf2f-4504-9168-9e487f5799ea","Type":"ContainerDied","Data":"aba68409fc0ab9d507eeac42d860bb0a7c053b194c3a997c7378f4f608de2f41"} Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.832463 4849 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aba68409fc0ab9d507eeac42d860bb0a7c053b194c3a997c7378f4f608de2f41" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.842307 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xdhfk" event={"ID":"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56","Type":"ContainerStarted","Data":"afa846409e87684df06ad8e23e6ecaa3a81ada28e38de07e41ba92f654331c35"} Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.878474 4849 patch_prober.go:28] interesting pod/router-default-5444994796-zdq4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:23:15 crc kubenswrapper[4849]: [-]has-synced failed: reason withheld Dec 03 12:23:15 crc kubenswrapper[4849]: [+]process-running ok Dec 03 12:23:15 crc kubenswrapper[4849]: healthz check failed Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.878531 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdq4x" podUID="7eca320e-67e3-4f03-92ff-8a79363ca7ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:23:15 crc kubenswrapper[4849]: I1203 12:23:15.904013 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 12:23:15 crc kubenswrapper[4849]: W1203 12:23:15.925766 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod2187ef92_2a54_4646_9c49_9fa970e8ae23.slice/crio-b1d7231250e7244a74eff5e55f46042f7cf1764edbb689cc7d4794136b4be562 WatchSource:0}: Error finding container b1d7231250e7244a74eff5e55f46042f7cf1764edbb689cc7d4794136b4be562: Status 404 returned error can't find the container with id b1d7231250e7244a74eff5e55f46042f7cf1764edbb689cc7d4794136b4be562 Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.487697 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.488075 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.489013 4849 patch_prober.go:28] interesting pod/console-f9d7485db-z6qrp container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.29:8443/health\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.489064 4849 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-console/console-f9d7485db-z6qrp" podUID="3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" containerName="console" probeResult="failure" output="Get \"https://10.217.0.29:8443/health\": dial tcp 10.217.0.29:8443: connect: connection refused" Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.852292 4849 generic.go:334] "Generic (PLEG): container finished" podID="2187ef92-2a54-4646-9c49-9fa970e8ae23" containerID="915045af0de35199483a08e0c79068efc8d331b87369987c88d8836c10008795" exitCode=0 Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.852769 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2187ef92-2a54-4646-9c49-9fa970e8ae23","Type":"ContainerDied","Data":"915045af0de35199483a08e0c79068efc8d331b87369987c88d8836c10008795"} Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.852822 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2187ef92-2a54-4646-9c49-9fa970e8ae23","Type":"ContainerStarted","Data":"b1d7231250e7244a74eff5e55f46042f7cf1764edbb689cc7d4794136b4be562"} Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.857115 4849 generic.go:334] "Generic (PLEG): container finished" podID="15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" containerID="ea3f657639aef007fb8682a93a5ee2ca84ea206c62d95d4f36155c4723f8ca35" exitCode=0 Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.857355 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xdhfk" event={"ID":"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56","Type":"ContainerDied","Data":"ea3f657639aef007fb8682a93a5ee2ca84ea206c62d95d4f36155c4723f8ca35"} Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.863342 4849 generic.go:334] "Generic (PLEG): container finished" podID="e444288e-49e3-4708-bd0b-9c70b5f09796" containerID="8e42f14a67ceb3fac5471817f1f457c1d2fb06066fc2019e098fc558769d921c" exitCode=0 Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.863384 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e444288e-49e3-4708-bd0b-9c70b5f09796","Type":"ContainerDied","Data":"8e42f14a67ceb3fac5471817f1f457c1d2fb06066fc2019e098fc558769d921c"} Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.875501 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.877765 4849 patch_prober.go:28] interesting pod/router-default-5444994796-zdq4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:23:16 crc kubenswrapper[4849]: [-]has-synced failed: reason withheld Dec 03 12:23:16 crc kubenswrapper[4849]: [+]process-running ok Dec 03 12:23:16 crc kubenswrapper[4849]: healthz check failed Dec 03 12:23:16 crc kubenswrapper[4849]: I1203 12:23:16.877808 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdq4x" podUID="7eca320e-67e3-4f03-92ff-8a79363ca7ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:23:17 crc kubenswrapper[4849]: I1203 12:23:17.013174 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:17 crc 
kubenswrapper[4849]: I1203 12:23:17.022197 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-5jm84" Dec 03 12:23:17 crc kubenswrapper[4849]: I1203 12:23:17.288604 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-jcr69" Dec 03 12:23:17 crc kubenswrapper[4849]: I1203 12:23:17.877244 4849 patch_prober.go:28] interesting pod/router-default-5444994796-zdq4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 12:23:17 crc kubenswrapper[4849]: [-]has-synced failed: reason withheld Dec 03 12:23:17 crc kubenswrapper[4849]: [+]process-running ok Dec 03 12:23:17 crc kubenswrapper[4849]: healthz check failed Dec 03 12:23:17 crc kubenswrapper[4849]: I1203 12:23:17.877303 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zdq4x" podUID="7eca320e-67e3-4f03-92ff-8a79363ca7ad" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 12:23:18 crc kubenswrapper[4849]: I1203 12:23:18.019214 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:23:18 crc kubenswrapper[4849]: I1203 12:23:18.884949 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:18 crc kubenswrapper[4849]: I1203 12:23:18.890021 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-zdq4x" Dec 03 12:23:19 crc kubenswrapper[4849]: I1203 12:23:19.696433 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:23:19 crc kubenswrapper[4849]: I1203 12:23:19.696509 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:23:19 crc kubenswrapper[4849]: I1203 12:23:19.697670 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:23:19 crc kubenswrapper[4849]: I1203 12:23:19.714108 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:23:19 crc 
kubenswrapper[4849]: I1203 12:23:19.797240 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:23:19 crc kubenswrapper[4849]: I1203 12:23:19.797368 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:23:19 crc kubenswrapper[4849]: I1203 12:23:19.816369 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:23:19 crc kubenswrapper[4849]: I1203 12:23:19.818792 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:23:19 crc kubenswrapper[4849]: I1203 12:23:19.972383 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:23:19 crc kubenswrapper[4849]: I1203 12:23:19.979426 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 12:23:19 crc kubenswrapper[4849]: I1203 12:23:19.984366 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.377619 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.380222 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.505488 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2187ef92-2a54-4646-9c49-9fa970e8ae23-kubelet-dir\") pod \"2187ef92-2a54-4646-9c49-9fa970e8ae23\" (UID: \"2187ef92-2a54-4646-9c49-9fa970e8ae23\") " Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.505547 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e444288e-49e3-4708-bd0b-9c70b5f09796-kube-api-access\") pod \"e444288e-49e3-4708-bd0b-9c70b5f09796\" (UID: \"e444288e-49e3-4708-bd0b-9c70b5f09796\") " Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.505574 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2187ef92-2a54-4646-9c49-9fa970e8ae23-kube-api-access\") pod \"2187ef92-2a54-4646-9c49-9fa970e8ae23\" (UID: \"2187ef92-2a54-4646-9c49-9fa970e8ae23\") " Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.505631 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e444288e-49e3-4708-bd0b-9c70b5f09796-kubelet-dir\") pod \"e444288e-49e3-4708-bd0b-9c70b5f09796\" (UID: \"e444288e-49e3-4708-bd0b-9c70b5f09796\") " Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.505935 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e444288e-49e3-4708-bd0b-9c70b5f09796-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e444288e-49e3-4708-bd0b-9c70b5f09796" (UID: "e444288e-49e3-4708-bd0b-9c70b5f09796"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.505979 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2187ef92-2a54-4646-9c49-9fa970e8ae23-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2187ef92-2a54-4646-9c49-9fa970e8ae23" (UID: "2187ef92-2a54-4646-9c49-9fa970e8ae23"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.508587 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e444288e-49e3-4708-bd0b-9c70b5f09796-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e444288e-49e3-4708-bd0b-9c70b5f09796" (UID: "e444288e-49e3-4708-bd0b-9c70b5f09796"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.509897 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2187ef92-2a54-4646-9c49-9fa970e8ae23-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2187ef92-2a54-4646-9c49-9fa970e8ae23" (UID: "2187ef92-2a54-4646-9c49-9fa970e8ae23"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.607498 4849 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e444288e-49e3-4708-bd0b-9c70b5f09796-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.607552 4849 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2187ef92-2a54-4646-9c49-9fa970e8ae23-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.607562 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e444288e-49e3-4708-bd0b-9c70b5f09796-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.607571 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2187ef92-2a54-4646-9c49-9fa970e8ae23-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.905184 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e444288e-49e3-4708-bd0b-9c70b5f09796","Type":"ContainerDied","Data":"8eb46b91182c492ea35bebb1654995e09f9e6fee9d43cef2eb541a0697a786bc"} Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.905220 4849 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8eb46b91182c492ea35bebb1654995e09f9e6fee9d43cef2eb541a0697a786bc" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.905266 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.907954 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2187ef92-2a54-4646-9c49-9fa970e8ae23","Type":"ContainerDied","Data":"b1d7231250e7244a74eff5e55f46042f7cf1764edbb689cc7d4794136b4be562"} Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.908034 4849 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1d7231250e7244a74eff5e55f46042f7cf1764edbb689cc7d4794136b4be562" Dec 03 12:23:20 crc kubenswrapper[4849]: I1203 12:23:20.908097 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 12:23:21 crc kubenswrapper[4849]: W1203 12:23:21.341597 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-9a0fb1676e6076f66e2ac26f3d821f832e5b4695e76c6b4362491144420e8cc8 WatchSource:0}: Error finding container 9a0fb1676e6076f66e2ac26f3d821f832e5b4695e76c6b4362491144420e8cc8: Status 404 returned error can't find the container with id 9a0fb1676e6076f66e2ac26f3d821f832e5b4695e76c6b4362491144420e8cc8 Dec 03 12:23:21 crc kubenswrapper[4849]: I1203 12:23:21.919575 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"8ce825fb7b306be90483dd10938d69d6458c83eb819ec862d56998d7c0f35ccf"} Dec 03 12:23:21 crc kubenswrapper[4849]: I1203 12:23:21.919857 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"3d49052e1b49e6d40ea6598ee843b8170c8330c42e5f2cbf2e0a7463e0b4f3fc"} Dec 03 12:23:21 crc kubenswrapper[4849]: I1203 12:23:21.922132 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d7cfba0454ade25d779302afb45d62344bce11a1996c475d2c496ae2d78ae7b4"} Dec 03 12:23:21 crc kubenswrapper[4849]: I1203 12:23:21.922171 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"0c5ccc36a3f1a2524650619461988e37b5f2ab357d07d77753b99b285c90b5ae"} Dec 03 12:23:21 crc kubenswrapper[4849]: I1203 12:23:21.922297 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:23:21 crc kubenswrapper[4849]: I1203 12:23:21.925503 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"228da913824cff4456e585384cd550c6bf253ebcb2e3fa511a7d65c654a308f3"} Dec 03 12:23:21 crc kubenswrapper[4849]: I1203 12:23:21.925829 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"9a0fb1676e6076f66e2ac26f3d821f832e5b4695e76c6b4362491144420e8cc8"} Dec 03 12:23:22 crc kubenswrapper[4849]: I1203 12:23:22.578158 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-w7xvk" Dec 03 12:23:22 crc kubenswrapper[4849]: I1203 12:23:22.677155 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:23:22 crc kubenswrapper[4849]: I1203 12:23:22.677202 4849 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:23:26 crc kubenswrapper[4849]: I1203 12:23:26.491937 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:26 crc kubenswrapper[4849]: I1203 12:23:26.500584 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:23:31 crc kubenswrapper[4849]: I1203 12:23:31.973995 4849 generic.go:334] "Generic (PLEG): container finished" podID="2fbb5c1c-5b96-4563-be16-83f73dece6aa" containerID="f638a6601d80ef43b6a0c27095b90c672da0d2907a869069734fe9cde01ac969" exitCode=0 Dec 03 12:23:31 crc kubenswrapper[4849]: I1203 12:23:31.974085 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5j2b" event={"ID":"2fbb5c1c-5b96-4563-be16-83f73dece6aa","Type":"ContainerDied","Data":"f638a6601d80ef43b6a0c27095b90c672da0d2907a869069734fe9cde01ac969"} Dec 03 12:23:31 crc kubenswrapper[4849]: I1203 12:23:31.978168 4849 generic.go:334] "Generic (PLEG): container finished" podID="4454830a-59f9-4ece-8e5c-554b725015ec" containerID="1bff27fd71845123c156f6ed14374384cfb0ab216d07aea0c3a90a06b7d44124" exitCode=0 Dec 03 12:23:31 crc kubenswrapper[4849]: I1203 12:23:31.978267 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6rcps" event={"ID":"4454830a-59f9-4ece-8e5c-554b725015ec","Type":"ContainerDied","Data":"1bff27fd71845123c156f6ed14374384cfb0ab216d07aea0c3a90a06b7d44124"} Dec 03 12:23:31 crc kubenswrapper[4849]: I1203 12:23:31.982551 4849 generic.go:334] "Generic (PLEG): container finished" podID="1c85938a-071b-468a-a7d6-7a106913311b" containerID="81d4e149e4f3d8dbf52c448e74f39936491680d194f1a4f75e418c00f8da480d" exitCode=0 Dec 03 12:23:31 crc kubenswrapper[4849]: I1203 12:23:31.982621 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k4pjk" event={"ID":"1c85938a-071b-468a-a7d6-7a106913311b","Type":"ContainerDied","Data":"81d4e149e4f3d8dbf52c448e74f39936491680d194f1a4f75e418c00f8da480d"} Dec 03 12:23:31 crc kubenswrapper[4849]: I1203 12:23:31.986471 4849 generic.go:334] "Generic (PLEG): container finished" podID="314e4660-f3be-424e-8635-f4e85fca56be" containerID="3ac24c753a74fa0d2ae61ecec175240a34ed77661ef50dd403de2aaea9557a3b" exitCode=0 Dec 03 12:23:31 crc kubenswrapper[4849]: I1203 12:23:31.986524 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlk48" event={"ID":"314e4660-f3be-424e-8635-f4e85fca56be","Type":"ContainerDied","Data":"3ac24c753a74fa0d2ae61ecec175240a34ed77661ef50dd403de2aaea9557a3b"} Dec 03 12:23:31 crc kubenswrapper[4849]: I1203 12:23:31.991797 4849 generic.go:334] "Generic (PLEG): container finished" podID="15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" containerID="c33074bc98ec0e05edc258a0fae64b9e53e13680a48bce2c077a8c0cc0b9f345" exitCode=0 Dec 03 12:23:31 crc kubenswrapper[4849]: I1203 12:23:31.991873 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xdhfk" event={"ID":"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56","Type":"ContainerDied","Data":"c33074bc98ec0e05edc258a0fae64b9e53e13680a48bce2c077a8c0cc0b9f345"} Dec 03 12:23:31 crc 
kubenswrapper[4849]: I1203 12:23:31.994962 4849 generic.go:334] "Generic (PLEG): container finished" podID="b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" containerID="768f55b2710300b7ef3ae935f7645cf176c8cf61026e21a117811cde90cdc2b6" exitCode=0 Dec 03 12:23:31 crc kubenswrapper[4849]: I1203 12:23:31.995011 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jvk79" event={"ID":"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60","Type":"ContainerDied","Data":"768f55b2710300b7ef3ae935f7645cf176c8cf61026e21a117811cde90cdc2b6"} Dec 03 12:23:31 crc kubenswrapper[4849]: I1203 12:23:31.997623 4849 generic.go:334] "Generic (PLEG): container finished" podID="195616d8-386c-4b49-80b8-6a8f1dfd87a6" containerID="72a6a2330d3a3a9fc97e2f6593a2ca63f16770283c342d365f734d2bce74144b" exitCode=0 Dec 03 12:23:31 crc kubenswrapper[4849]: I1203 12:23:31.997669 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbm2s" event={"ID":"195616d8-386c-4b49-80b8-6a8f1dfd87a6","Type":"ContainerDied","Data":"72a6a2330d3a3a9fc97e2f6593a2ca63f16770283c342d365f734d2bce74144b"} Dec 03 12:23:32 crc kubenswrapper[4849]: I1203 12:23:32.002326 4849 generic.go:334] "Generic (PLEG): container finished" podID="3c329326-860d-4eed-855d-e7811ee41819" containerID="fa87c12f08583c18561877a36b903bdc4904d572109d060cace569a375599b48" exitCode=0 Dec 03 12:23:32 crc kubenswrapper[4849]: I1203 12:23:32.002428 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwdxv" event={"ID":"3c329326-860d-4eed-855d-e7811ee41819","Type":"ContainerDied","Data":"fa87c12f08583c18561877a36b903bdc4904d572109d060cace569a375599b48"} Dec 03 12:23:32 crc kubenswrapper[4849]: I1203 12:23:32.518692 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.008863 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlk48" event={"ID":"314e4660-f3be-424e-8635-f4e85fca56be","Type":"ContainerStarted","Data":"23b7b65dc8c26533207c3b647c2b91a0bf8c6dc065dbd44548531571ab6427a9"} Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.011347 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xdhfk" event={"ID":"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56","Type":"ContainerStarted","Data":"e350f25548c486e296ffda0678e8f2a31bb80e7088911731d356ca88619bd295"} Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.012949 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jvk79" event={"ID":"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60","Type":"ContainerStarted","Data":"03a0bf68cbd0b9b49782081e2d11c129c551e45f4b012044d733184eb473eb92"} Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.014487 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbm2s" event={"ID":"195616d8-386c-4b49-80b8-6a8f1dfd87a6","Type":"ContainerStarted","Data":"7aa637a239843268977a398ecf3e162d759c872c0833e0fa61a4f2f5efe92eb1"} Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.016704 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwdxv" event={"ID":"3c329326-860d-4eed-855d-e7811ee41819","Type":"ContainerStarted","Data":"e672e57d56d2e3c1beb038c620a11fba5a25a811b12d0dcfcf10fafc5676fbbe"} Dec 03 12:23:33 crc 
kubenswrapper[4849]: I1203 12:23:33.018533 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5j2b" event={"ID":"2fbb5c1c-5b96-4563-be16-83f73dece6aa","Type":"ContainerStarted","Data":"a8639832a87b1a562145ac1457d429f3585114242767444d80da638bd37d8237"} Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.030032 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6rcps" event={"ID":"4454830a-59f9-4ece-8e5c-554b725015ec","Type":"ContainerStarted","Data":"8d9b88d2c89a51333c213554ed813f4c38187ff2b1af66185abfdad71a2eea08"} Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.032141 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k4pjk" event={"ID":"1c85938a-071b-468a-a7d6-7a106913311b","Type":"ContainerStarted","Data":"93e7a511cc932c8bb7156beef0daecd198203930cb31b8833c087959ce830900"} Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.035809 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hlk48" podStartSLOduration=2.29303056 podStartE2EDuration="22.035794904s" podCreationTimestamp="2025-12-03 12:23:11 +0000 UTC" firstStartedPulling="2025-12-03 12:23:12.719184751 +0000 UTC m=+139.181032533" lastFinishedPulling="2025-12-03 12:23:32.461949094 +0000 UTC m=+158.923796877" observedRunningTime="2025-12-03 12:23:33.034403826 +0000 UTC m=+159.496251610" watchObservedRunningTime="2025-12-03 12:23:33.035794904 +0000 UTC m=+159.497642687" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.051037 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k4pjk" podStartSLOduration=2.399294889 podStartE2EDuration="20.051023899s" podCreationTimestamp="2025-12-03 12:23:13 +0000 UTC" firstStartedPulling="2025-12-03 12:23:14.812482028 +0000 UTC m=+141.274329811" lastFinishedPulling="2025-12-03 12:23:32.464211038 +0000 UTC m=+158.926058821" observedRunningTime="2025-12-03 12:23:33.049579672 +0000 UTC m=+159.511427455" watchObservedRunningTime="2025-12-03 12:23:33.051023899 +0000 UTC m=+159.512871681" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.066928 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jvk79" podStartSLOduration=2.383932855 podStartE2EDuration="19.066917253s" podCreationTimestamp="2025-12-03 12:23:14 +0000 UTC" firstStartedPulling="2025-12-03 12:23:15.830838116 +0000 UTC m=+142.292685900" lastFinishedPulling="2025-12-03 12:23:32.513822514 +0000 UTC m=+158.975670298" observedRunningTime="2025-12-03 12:23:33.065483966 +0000 UTC m=+159.527331749" watchObservedRunningTime="2025-12-03 12:23:33.066917253 +0000 UTC m=+159.528765036" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.082302 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cwdxv" podStartSLOduration=2.318832584 podStartE2EDuration="22.082286622s" podCreationTimestamp="2025-12-03 12:23:11 +0000 UTC" firstStartedPulling="2025-12-03 12:23:12.724517792 +0000 UTC m=+139.186365575" lastFinishedPulling="2025-12-03 12:23:32.487971831 +0000 UTC m=+158.949819613" observedRunningTime="2025-12-03 12:23:33.081405104 +0000 UTC m=+159.543252887" watchObservedRunningTime="2025-12-03 12:23:33.082286622 +0000 UTC m=+159.544134405" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.099405 4849 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jbm2s" podStartSLOduration=2.357366754 podStartE2EDuration="22.099393289s" podCreationTimestamp="2025-12-03 12:23:11 +0000 UTC" firstStartedPulling="2025-12-03 12:23:12.72209805 +0000 UTC m=+139.183945834" lastFinishedPulling="2025-12-03 12:23:32.464124586 +0000 UTC m=+158.925972369" observedRunningTime="2025-12-03 12:23:33.098005257 +0000 UTC m=+159.559853040" watchObservedRunningTime="2025-12-03 12:23:33.099393289 +0000 UTC m=+159.561241072" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.117971 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xdhfk" podStartSLOduration=7.029324051 podStartE2EDuration="19.117957366s" podCreationTimestamp="2025-12-03 12:23:14 +0000 UTC" firstStartedPulling="2025-12-03 12:23:20.34087541 +0000 UTC m=+146.802723193" lastFinishedPulling="2025-12-03 12:23:32.429508725 +0000 UTC m=+158.891356508" observedRunningTime="2025-12-03 12:23:33.117319607 +0000 UTC m=+159.579167391" watchObservedRunningTime="2025-12-03 12:23:33.117957366 +0000 UTC m=+159.579805150" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.131239 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-h5j2b" podStartSLOduration=2.373297848 podStartE2EDuration="21.131225773s" podCreationTimestamp="2025-12-03 12:23:12 +0000 UTC" firstStartedPulling="2025-12-03 12:23:13.748464605 +0000 UTC m=+140.210312388" lastFinishedPulling="2025-12-03 12:23:32.50639253 +0000 UTC m=+158.968240313" observedRunningTime="2025-12-03 12:23:33.130630664 +0000 UTC m=+159.592478447" watchObservedRunningTime="2025-12-03 12:23:33.131225773 +0000 UTC m=+159.593073556" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.149508 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6rcps" podStartSLOduration=3.3475471629999998 podStartE2EDuration="23.149492462s" podCreationTimestamp="2025-12-03 12:23:10 +0000 UTC" firstStartedPulling="2025-12-03 12:23:12.736846581 +0000 UTC m=+139.198694365" lastFinishedPulling="2025-12-03 12:23:32.53879188 +0000 UTC m=+159.000639664" observedRunningTime="2025-12-03 12:23:33.147040781 +0000 UTC m=+159.608888553" watchObservedRunningTime="2025-12-03 12:23:33.149492462 +0000 UTC m=+159.611340245" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.279225 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.279510 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.477385 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs\") pod \"network-metrics-daemon-hjzzk\" (UID: \"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.482184 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cebc8f9-e598-45ce-aed1-4fbd7df7fb86-metrics-certs\") pod \"network-metrics-daemon-hjzzk\" (UID: 
\"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86\") " pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.681633 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.681892 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:33 crc kubenswrapper[4849]: I1203 12:23:33.766900 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hjzzk" Dec 03 12:23:34 crc kubenswrapper[4849]: I1203 12:23:34.160802 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-hjzzk"] Dec 03 12:23:34 crc kubenswrapper[4849]: W1203 12:23:34.165438 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1cebc8f9_e598_45ce_aed1_4fbd7df7fb86.slice/crio-375246e717a4ca66711fd0b160b5c93488a25a0e611c4eada4ac67cf2067222d WatchSource:0}: Error finding container 375246e717a4ca66711fd0b160b5c93488a25a0e611c4eada4ac67cf2067222d: Status 404 returned error can't find the container with id 375246e717a4ca66711fd0b160b5c93488a25a0e611c4eada4ac67cf2067222d Dec 03 12:23:34 crc kubenswrapper[4849]: I1203 12:23:34.353249 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-h5j2b" podUID="2fbb5c1c-5b96-4563-be16-83f73dece6aa" containerName="registry-server" probeResult="failure" output=< Dec 03 12:23:34 crc kubenswrapper[4849]: timeout: failed to connect service ":50051" within 1s Dec 03 12:23:34 crc kubenswrapper[4849]: > Dec 03 12:23:34 crc kubenswrapper[4849]: I1203 12:23:34.691395 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:34 crc kubenswrapper[4849]: I1203 12:23:34.691609 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:34 crc kubenswrapper[4849]: I1203 12:23:34.721993 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-k4pjk" podUID="1c85938a-071b-468a-a7d6-7a106913311b" containerName="registry-server" probeResult="failure" output=< Dec 03 12:23:34 crc kubenswrapper[4849]: timeout: failed to connect service ":50051" within 1s Dec 03 12:23:34 crc kubenswrapper[4849]: > Dec 03 12:23:35 crc kubenswrapper[4849]: I1203 12:23:35.043701 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" event={"ID":"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86","Type":"ContainerStarted","Data":"b1bfa2f25f4bf0c32bc5e5977bbc1f6c8c07c16477ae32adbc18a8cc6a94238c"} Dec 03 12:23:35 crc kubenswrapper[4849]: I1203 12:23:35.043746 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" event={"ID":"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86","Type":"ContainerStarted","Data":"707e8f971207241fd34e54f0502e42d2bec368ee194f675c320a03f215789463"} Dec 03 12:23:35 crc kubenswrapper[4849]: I1203 12:23:35.043757 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-hjzzk" event={"ID":"1cebc8f9-e598-45ce-aed1-4fbd7df7fb86","Type":"ContainerStarted","Data":"375246e717a4ca66711fd0b160b5c93488a25a0e611c4eada4ac67cf2067222d"} Dec 
03 12:23:35 crc kubenswrapper[4849]: I1203 12:23:35.054814 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-hjzzk" podStartSLOduration=144.054801154 podStartE2EDuration="2m24.054801154s" podCreationTimestamp="2025-12-03 12:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:35.054196749 +0000 UTC m=+161.516044531" watchObservedRunningTime="2025-12-03 12:23:35.054801154 +0000 UTC m=+161.516648938" Dec 03 12:23:35 crc kubenswrapper[4849]: I1203 12:23:35.113418 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:35 crc kubenswrapper[4849]: I1203 12:23:35.113467 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:35 crc kubenswrapper[4849]: I1203 12:23:35.720841 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jvk79" podUID="b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" containerName="registry-server" probeResult="failure" output=< Dec 03 12:23:35 crc kubenswrapper[4849]: timeout: failed to connect service ":50051" within 1s Dec 03 12:23:35 crc kubenswrapper[4849]: > Dec 03 12:23:36 crc kubenswrapper[4849]: I1203 12:23:36.137678 4849 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xdhfk" podUID="15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" containerName="registry-server" probeResult="failure" output=< Dec 03 12:23:36 crc kubenswrapper[4849]: timeout: failed to connect service ":50051" within 1s Dec 03 12:23:36 crc kubenswrapper[4849]: > Dec 03 12:23:41 crc kubenswrapper[4849]: I1203 12:23:41.294780 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:41 crc kubenswrapper[4849]: I1203 12:23:41.295130 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:41 crc kubenswrapper[4849]: I1203 12:23:41.325324 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:41 crc kubenswrapper[4849]: I1203 12:23:41.501290 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:41 crc kubenswrapper[4849]: I1203 12:23:41.501324 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:41 crc kubenswrapper[4849]: I1203 12:23:41.527043 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:41 crc kubenswrapper[4849]: I1203 12:23:41.686708 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:41 crc kubenswrapper[4849]: I1203 12:23:41.686756 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:41 crc kubenswrapper[4849]: I1203 12:23:41.713018 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:41 crc kubenswrapper[4849]: I1203 12:23:41.908715 
4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:41 crc kubenswrapper[4849]: I1203 12:23:41.908757 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:41 crc kubenswrapper[4849]: I1203 12:23:41.937113 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:42 crc kubenswrapper[4849]: I1203 12:23:42.099254 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:42 crc kubenswrapper[4849]: I1203 12:23:42.099996 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:42 crc kubenswrapper[4849]: I1203 12:23:42.100654 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:23:42 crc kubenswrapper[4849]: I1203 12:23:42.105985 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:23:43 crc kubenswrapper[4849]: I1203 12:23:43.315239 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:43 crc kubenswrapper[4849]: I1203 12:23:43.344136 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:23:43 crc kubenswrapper[4849]: I1203 12:23:43.347522 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hlk48"] Dec 03 12:23:43 crc kubenswrapper[4849]: I1203 12:23:43.709656 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:43 crc kubenswrapper[4849]: I1203 12:23:43.738722 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:43 crc kubenswrapper[4849]: I1203 12:23:43.948306 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jbm2s"] Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.079800 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jbm2s" podUID="195616d8-386c-4b49-80b8-6a8f1dfd87a6" containerName="registry-server" containerID="cri-o://7aa637a239843268977a398ecf3e162d759c872c0833e0fa61a4f2f5efe92eb1" gracePeriod=2 Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.080267 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hlk48" podUID="314e4660-f3be-424e-8635-f4e85fca56be" containerName="registry-server" containerID="cri-o://23b7b65dc8c26533207c3b647c2b91a0bf8c6dc065dbd44548531571ab6427a9" gracePeriod=2 Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.512092 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.516886 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.694068 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/314e4660-f3be-424e-8635-f4e85fca56be-utilities\") pod \"314e4660-f3be-424e-8635-f4e85fca56be\" (UID: \"314e4660-f3be-424e-8635-f4e85fca56be\") " Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.694324 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/195616d8-386c-4b49-80b8-6a8f1dfd87a6-utilities\") pod \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\" (UID: \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\") " Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.694906 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/195616d8-386c-4b49-80b8-6a8f1dfd87a6-catalog-content\") pod \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\" (UID: \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\") " Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.694708 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/314e4660-f3be-424e-8635-f4e85fca56be-utilities" (OuterVolumeSpecName: "utilities") pod "314e4660-f3be-424e-8635-f4e85fca56be" (UID: "314e4660-f3be-424e-8635-f4e85fca56be"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.694875 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/195616d8-386c-4b49-80b8-6a8f1dfd87a6-utilities" (OuterVolumeSpecName: "utilities") pod "195616d8-386c-4b49-80b8-6a8f1dfd87a6" (UID: "195616d8-386c-4b49-80b8-6a8f1dfd87a6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.694977 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/314e4660-f3be-424e-8635-f4e85fca56be-catalog-content\") pod \"314e4660-f3be-424e-8635-f4e85fca56be\" (UID: \"314e4660-f3be-424e-8635-f4e85fca56be\") " Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.695014 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvpkk\" (UniqueName: \"kubernetes.io/projected/195616d8-386c-4b49-80b8-6a8f1dfd87a6-kube-api-access-lvpkk\") pod \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\" (UID: \"195616d8-386c-4b49-80b8-6a8f1dfd87a6\") " Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.695043 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97rhs\" (UniqueName: \"kubernetes.io/projected/314e4660-f3be-424e-8635-f4e85fca56be-kube-api-access-97rhs\") pod \"314e4660-f3be-424e-8635-f4e85fca56be\" (UID: \"314e4660-f3be-424e-8635-f4e85fca56be\") " Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.695307 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/314e4660-f3be-424e-8635-f4e85fca56be-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.695323 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/195616d8-386c-4b49-80b8-6a8f1dfd87a6-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.699374 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/314e4660-f3be-424e-8635-f4e85fca56be-kube-api-access-97rhs" (OuterVolumeSpecName: "kube-api-access-97rhs") pod "314e4660-f3be-424e-8635-f4e85fca56be" (UID: "314e4660-f3be-424e-8635-f4e85fca56be"). InnerVolumeSpecName "kube-api-access-97rhs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.701358 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/195616d8-386c-4b49-80b8-6a8f1dfd87a6-kube-api-access-lvpkk" (OuterVolumeSpecName: "kube-api-access-lvpkk") pod "195616d8-386c-4b49-80b8-6a8f1dfd87a6" (UID: "195616d8-386c-4b49-80b8-6a8f1dfd87a6"). InnerVolumeSpecName "kube-api-access-lvpkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.720971 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.733888 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/195616d8-386c-4b49-80b8-6a8f1dfd87a6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "195616d8-386c-4b49-80b8-6a8f1dfd87a6" (UID: "195616d8-386c-4b49-80b8-6a8f1dfd87a6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.736533 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/314e4660-f3be-424e-8635-f4e85fca56be-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "314e4660-f3be-424e-8635-f4e85fca56be" (UID: "314e4660-f3be-424e-8635-f4e85fca56be"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.750208 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.796040 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/314e4660-f3be-424e-8635-f4e85fca56be-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.796073 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvpkk\" (UniqueName: \"kubernetes.io/projected/195616d8-386c-4b49-80b8-6a8f1dfd87a6-kube-api-access-lvpkk\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.796085 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97rhs\" (UniqueName: \"kubernetes.io/projected/314e4660-f3be-424e-8635-f4e85fca56be-kube-api-access-97rhs\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:44 crc kubenswrapper[4849]: I1203 12:23:44.796094 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/195616d8-386c-4b49-80b8-6a8f1dfd87a6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.086087 4849 generic.go:334] "Generic (PLEG): container finished" podID="314e4660-f3be-424e-8635-f4e85fca56be" containerID="23b7b65dc8c26533207c3b647c2b91a0bf8c6dc065dbd44548531571ab6427a9" exitCode=0 Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.086188 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hlk48" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.086201 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlk48" event={"ID":"314e4660-f3be-424e-8635-f4e85fca56be","Type":"ContainerDied","Data":"23b7b65dc8c26533207c3b647c2b91a0bf8c6dc065dbd44548531571ab6427a9"} Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.086727 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hlk48" event={"ID":"314e4660-f3be-424e-8635-f4e85fca56be","Type":"ContainerDied","Data":"a165e79ffb17450c251d42bb977b9feb225fc346f8628e3a52bbadec99917dd1"} Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.086759 4849 scope.go:117] "RemoveContainer" containerID="23b7b65dc8c26533207c3b647c2b91a0bf8c6dc065dbd44548531571ab6427a9" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.089840 4849 generic.go:334] "Generic (PLEG): container finished" podID="195616d8-386c-4b49-80b8-6a8f1dfd87a6" containerID="7aa637a239843268977a398ecf3e162d759c872c0833e0fa61a4f2f5efe92eb1" exitCode=0 Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.089894 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbm2s" event={"ID":"195616d8-386c-4b49-80b8-6a8f1dfd87a6","Type":"ContainerDied","Data":"7aa637a239843268977a398ecf3e162d759c872c0833e0fa61a4f2f5efe92eb1"} Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.089917 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jbm2s" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.089936 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jbm2s" event={"ID":"195616d8-386c-4b49-80b8-6a8f1dfd87a6","Type":"ContainerDied","Data":"dce64a543bdf752432b0bb20c32d5ddf46d65dbaf6a89b2cd01245f19b50bf4e"} Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.100213 4849 scope.go:117] "RemoveContainer" containerID="3ac24c753a74fa0d2ae61ecec175240a34ed77661ef50dd403de2aaea9557a3b" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.109192 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hlk48"] Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.114819 4849 scope.go:117] "RemoveContainer" containerID="6af6eff0cfe3bd3433b6750d39bb776cfa51600d31d97799f3c5675d9f77b8fe" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.116433 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hlk48"] Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.122921 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jbm2s"] Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.124565 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jbm2s"] Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.136097 4849 scope.go:117] "RemoveContainer" containerID="23b7b65dc8c26533207c3b647c2b91a0bf8c6dc065dbd44548531571ab6427a9" Dec 03 12:23:45 crc kubenswrapper[4849]: E1203 12:23:45.137271 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23b7b65dc8c26533207c3b647c2b91a0bf8c6dc065dbd44548531571ab6427a9\": container with ID starting with 
23b7b65dc8c26533207c3b647c2b91a0bf8c6dc065dbd44548531571ab6427a9 not found: ID does not exist" containerID="23b7b65dc8c26533207c3b647c2b91a0bf8c6dc065dbd44548531571ab6427a9" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.137315 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23b7b65dc8c26533207c3b647c2b91a0bf8c6dc065dbd44548531571ab6427a9"} err="failed to get container status \"23b7b65dc8c26533207c3b647c2b91a0bf8c6dc065dbd44548531571ab6427a9\": rpc error: code = NotFound desc = could not find container \"23b7b65dc8c26533207c3b647c2b91a0bf8c6dc065dbd44548531571ab6427a9\": container with ID starting with 23b7b65dc8c26533207c3b647c2b91a0bf8c6dc065dbd44548531571ab6427a9 not found: ID does not exist" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.137366 4849 scope.go:117] "RemoveContainer" containerID="3ac24c753a74fa0d2ae61ecec175240a34ed77661ef50dd403de2aaea9557a3b" Dec 03 12:23:45 crc kubenswrapper[4849]: E1203 12:23:45.137678 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ac24c753a74fa0d2ae61ecec175240a34ed77661ef50dd403de2aaea9557a3b\": container with ID starting with 3ac24c753a74fa0d2ae61ecec175240a34ed77661ef50dd403de2aaea9557a3b not found: ID does not exist" containerID="3ac24c753a74fa0d2ae61ecec175240a34ed77661ef50dd403de2aaea9557a3b" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.137725 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ac24c753a74fa0d2ae61ecec175240a34ed77661ef50dd403de2aaea9557a3b"} err="failed to get container status \"3ac24c753a74fa0d2ae61ecec175240a34ed77661ef50dd403de2aaea9557a3b\": rpc error: code = NotFound desc = could not find container \"3ac24c753a74fa0d2ae61ecec175240a34ed77661ef50dd403de2aaea9557a3b\": container with ID starting with 3ac24c753a74fa0d2ae61ecec175240a34ed77661ef50dd403de2aaea9557a3b not found: ID does not exist" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.137746 4849 scope.go:117] "RemoveContainer" containerID="6af6eff0cfe3bd3433b6750d39bb776cfa51600d31d97799f3c5675d9f77b8fe" Dec 03 12:23:45 crc kubenswrapper[4849]: E1203 12:23:45.138043 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6af6eff0cfe3bd3433b6750d39bb776cfa51600d31d97799f3c5675d9f77b8fe\": container with ID starting with 6af6eff0cfe3bd3433b6750d39bb776cfa51600d31d97799f3c5675d9f77b8fe not found: ID does not exist" containerID="6af6eff0cfe3bd3433b6750d39bb776cfa51600d31d97799f3c5675d9f77b8fe" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.138077 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6af6eff0cfe3bd3433b6750d39bb776cfa51600d31d97799f3c5675d9f77b8fe"} err="failed to get container status \"6af6eff0cfe3bd3433b6750d39bb776cfa51600d31d97799f3c5675d9f77b8fe\": rpc error: code = NotFound desc = could not find container \"6af6eff0cfe3bd3433b6750d39bb776cfa51600d31d97799f3c5675d9f77b8fe\": container with ID starting with 6af6eff0cfe3bd3433b6750d39bb776cfa51600d31d97799f3c5675d9f77b8fe not found: ID does not exist" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.138101 4849 scope.go:117] "RemoveContainer" containerID="7aa637a239843268977a398ecf3e162d759c872c0833e0fa61a4f2f5efe92eb1" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.147508 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.150634 4849 scope.go:117] "RemoveContainer" containerID="72a6a2330d3a3a9fc97e2f6593a2ca63f16770283c342d365f734d2bce74144b" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.165121 4849 scope.go:117] "RemoveContainer" containerID="5a4217d62600dd7a2e79ca76b330f96b5b1e15acd31309386a3bfc41a3c86113" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.173169 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.175597 4849 scope.go:117] "RemoveContainer" containerID="7aa637a239843268977a398ecf3e162d759c872c0833e0fa61a4f2f5efe92eb1" Dec 03 12:23:45 crc kubenswrapper[4849]: E1203 12:23:45.176121 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7aa637a239843268977a398ecf3e162d759c872c0833e0fa61a4f2f5efe92eb1\": container with ID starting with 7aa637a239843268977a398ecf3e162d759c872c0833e0fa61a4f2f5efe92eb1 not found: ID does not exist" containerID="7aa637a239843268977a398ecf3e162d759c872c0833e0fa61a4f2f5efe92eb1" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.176165 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aa637a239843268977a398ecf3e162d759c872c0833e0fa61a4f2f5efe92eb1"} err="failed to get container status \"7aa637a239843268977a398ecf3e162d759c872c0833e0fa61a4f2f5efe92eb1\": rpc error: code = NotFound desc = could not find container \"7aa637a239843268977a398ecf3e162d759c872c0833e0fa61a4f2f5efe92eb1\": container with ID starting with 7aa637a239843268977a398ecf3e162d759c872c0833e0fa61a4f2f5efe92eb1 not found: ID does not exist" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.176190 4849 scope.go:117] "RemoveContainer" containerID="72a6a2330d3a3a9fc97e2f6593a2ca63f16770283c342d365f734d2bce74144b" Dec 03 12:23:45 crc kubenswrapper[4849]: E1203 12:23:45.176506 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72a6a2330d3a3a9fc97e2f6593a2ca63f16770283c342d365f734d2bce74144b\": container with ID starting with 72a6a2330d3a3a9fc97e2f6593a2ca63f16770283c342d365f734d2bce74144b not found: ID does not exist" containerID="72a6a2330d3a3a9fc97e2f6593a2ca63f16770283c342d365f734d2bce74144b" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.176533 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72a6a2330d3a3a9fc97e2f6593a2ca63f16770283c342d365f734d2bce74144b"} err="failed to get container status \"72a6a2330d3a3a9fc97e2f6593a2ca63f16770283c342d365f734d2bce74144b\": rpc error: code = NotFound desc = could not find container \"72a6a2330d3a3a9fc97e2f6593a2ca63f16770283c342d365f734d2bce74144b\": container with ID starting with 72a6a2330d3a3a9fc97e2f6593a2ca63f16770283c342d365f734d2bce74144b not found: ID does not exist" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.176554 4849 scope.go:117] "RemoveContainer" containerID="5a4217d62600dd7a2e79ca76b330f96b5b1e15acd31309386a3bfc41a3c86113" Dec 03 12:23:45 crc kubenswrapper[4849]: E1203 12:23:45.178484 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a4217d62600dd7a2e79ca76b330f96b5b1e15acd31309386a3bfc41a3c86113\": container with ID starting 
with 5a4217d62600dd7a2e79ca76b330f96b5b1e15acd31309386a3bfc41a3c86113 not found: ID does not exist" containerID="5a4217d62600dd7a2e79ca76b330f96b5b1e15acd31309386a3bfc41a3c86113" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.178511 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a4217d62600dd7a2e79ca76b330f96b5b1e15acd31309386a3bfc41a3c86113"} err="failed to get container status \"5a4217d62600dd7a2e79ca76b330f96b5b1e15acd31309386a3bfc41a3c86113\": rpc error: code = NotFound desc = could not find container \"5a4217d62600dd7a2e79ca76b330f96b5b1e15acd31309386a3bfc41a3c86113\": container with ID starting with 5a4217d62600dd7a2e79ca76b330f96b5b1e15acd31309386a3bfc41a3c86113 not found: ID does not exist" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.748631 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k4pjk"] Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.748855 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k4pjk" podUID="1c85938a-071b-468a-a7d6-7a106913311b" containerName="registry-server" containerID="cri-o://93e7a511cc932c8bb7156beef0daecd198203930cb31b8833c087959ce830900" gracePeriod=2 Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.862805 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="195616d8-386c-4b49-80b8-6a8f1dfd87a6" path="/var/lib/kubelet/pods/195616d8-386c-4b49-80b8-6a8f1dfd87a6/volumes" Dec 03 12:23:45 crc kubenswrapper[4849]: I1203 12:23:45.863351 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="314e4660-f3be-424e-8635-f4e85fca56be" path="/var/lib/kubelet/pods/314e4660-f3be-424e-8635-f4e85fca56be/volumes" Dec 03 12:23:46 crc kubenswrapper[4849]: I1203 12:23:46.096490 4849 generic.go:334] "Generic (PLEG): container finished" podID="1c85938a-071b-468a-a7d6-7a106913311b" containerID="93e7a511cc932c8bb7156beef0daecd198203930cb31b8833c087959ce830900" exitCode=0 Dec 03 12:23:46 crc kubenswrapper[4849]: I1203 12:23:46.096529 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k4pjk" event={"ID":"1c85938a-071b-468a-a7d6-7a106913311b","Type":"ContainerDied","Data":"93e7a511cc932c8bb7156beef0daecd198203930cb31b8833c087959ce830900"} Dec 03 12:23:46 crc kubenswrapper[4849]: I1203 12:23:46.130004 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:46 crc kubenswrapper[4849]: I1203 12:23:46.211738 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c85938a-071b-468a-a7d6-7a106913311b-catalog-content\") pod \"1c85938a-071b-468a-a7d6-7a106913311b\" (UID: \"1c85938a-071b-468a-a7d6-7a106913311b\") " Dec 03 12:23:46 crc kubenswrapper[4849]: I1203 12:23:46.211784 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c85938a-071b-468a-a7d6-7a106913311b-utilities\") pod \"1c85938a-071b-468a-a7d6-7a106913311b\" (UID: \"1c85938a-071b-468a-a7d6-7a106913311b\") " Dec 03 12:23:46 crc kubenswrapper[4849]: I1203 12:23:46.211840 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5dfbh\" (UniqueName: \"kubernetes.io/projected/1c85938a-071b-468a-a7d6-7a106913311b-kube-api-access-5dfbh\") pod \"1c85938a-071b-468a-a7d6-7a106913311b\" (UID: \"1c85938a-071b-468a-a7d6-7a106913311b\") " Dec 03 12:23:46 crc kubenswrapper[4849]: I1203 12:23:46.212414 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c85938a-071b-468a-a7d6-7a106913311b-utilities" (OuterVolumeSpecName: "utilities") pod "1c85938a-071b-468a-a7d6-7a106913311b" (UID: "1c85938a-071b-468a-a7d6-7a106913311b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:23:46 crc kubenswrapper[4849]: I1203 12:23:46.217336 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c85938a-071b-468a-a7d6-7a106913311b-kube-api-access-5dfbh" (OuterVolumeSpecName: "kube-api-access-5dfbh") pod "1c85938a-071b-468a-a7d6-7a106913311b" (UID: "1c85938a-071b-468a-a7d6-7a106913311b"). InnerVolumeSpecName "kube-api-access-5dfbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:23:46 crc kubenswrapper[4849]: I1203 12:23:46.226331 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c85938a-071b-468a-a7d6-7a106913311b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1c85938a-071b-468a-a7d6-7a106913311b" (UID: "1c85938a-071b-468a-a7d6-7a106913311b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:23:46 crc kubenswrapper[4849]: I1203 12:23:46.313127 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c85938a-071b-468a-a7d6-7a106913311b-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:46 crc kubenswrapper[4849]: I1203 12:23:46.313153 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5dfbh\" (UniqueName: \"kubernetes.io/projected/1c85938a-071b-468a-a7d6-7a106913311b-kube-api-access-5dfbh\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:46 crc kubenswrapper[4849]: I1203 12:23:46.313177 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c85938a-071b-468a-a7d6-7a106913311b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:47 crc kubenswrapper[4849]: I1203 12:23:47.110554 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k4pjk" event={"ID":"1c85938a-071b-468a-a7d6-7a106913311b","Type":"ContainerDied","Data":"c6b3874be43327808c1d9ae5387d2035d55979de60f6bef7c04cafcd1290259b"} Dec 03 12:23:47 crc kubenswrapper[4849]: I1203 12:23:47.110658 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k4pjk" Dec 03 12:23:47 crc kubenswrapper[4849]: I1203 12:23:47.111252 4849 scope.go:117] "RemoveContainer" containerID="93e7a511cc932c8bb7156beef0daecd198203930cb31b8833c087959ce830900" Dec 03 12:23:47 crc kubenswrapper[4849]: I1203 12:23:47.124628 4849 scope.go:117] "RemoveContainer" containerID="81d4e149e4f3d8dbf52c448e74f39936491680d194f1a4f75e418c00f8da480d" Dec 03 12:23:47 crc kubenswrapper[4849]: I1203 12:23:47.132288 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k4pjk"] Dec 03 12:23:47 crc kubenswrapper[4849]: I1203 12:23:47.134521 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k4pjk"] Dec 03 12:23:47 crc kubenswrapper[4849]: I1203 12:23:47.140226 4849 scope.go:117] "RemoveContainer" containerID="b0013266c223be31ee8c4106b60842b4ef64597373a735655948e99c9b56a8cc" Dec 03 12:23:47 crc kubenswrapper[4849]: I1203 12:23:47.483732 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6q7bv" Dec 03 12:23:47 crc kubenswrapper[4849]: I1203 12:23:47.861709 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c85938a-071b-468a-a7d6-7a106913311b" path="/var/lib/kubelet/pods/1c85938a-071b-468a-a7d6-7a106913311b/volumes" Dec 03 12:23:48 crc kubenswrapper[4849]: I1203 12:23:48.347504 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xdhfk"] Dec 03 12:23:48 crc kubenswrapper[4849]: I1203 12:23:48.347734 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xdhfk" podUID="15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" containerName="registry-server" containerID="cri-o://e350f25548c486e296ffda0678e8f2a31bb80e7088911731d356ca88619bd295" gracePeriod=2 Dec 03 12:23:48 crc kubenswrapper[4849]: I1203 12:23:48.709301 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:48 crc kubenswrapper[4849]: I1203 12:23:48.738132 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-catalog-content\") pod \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\" (UID: \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\") " Dec 03 12:23:48 crc kubenswrapper[4849]: I1203 12:23:48.738181 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-utilities\") pod \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\" (UID: \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\") " Dec 03 12:23:48 crc kubenswrapper[4849]: I1203 12:23:48.738205 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-299jg\" (UniqueName: \"kubernetes.io/projected/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-kube-api-access-299jg\") pod \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\" (UID: \"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56\") " Dec 03 12:23:48 crc kubenswrapper[4849]: I1203 12:23:48.738842 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-utilities" (OuterVolumeSpecName: "utilities") pod "15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" (UID: "15583ad6-2adf-4151-aa5b-8a2f4ba0bb56"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:23:48 crc kubenswrapper[4849]: I1203 12:23:48.745570 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-kube-api-access-299jg" (OuterVolumeSpecName: "kube-api-access-299jg") pod "15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" (UID: "15583ad6-2adf-4151-aa5b-8a2f4ba0bb56"). InnerVolumeSpecName "kube-api-access-299jg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:23:48 crc kubenswrapper[4849]: I1203 12:23:48.815294 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" (UID: "15583ad6-2adf-4151-aa5b-8a2f4ba0bb56"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:23:48 crc kubenswrapper[4849]: I1203 12:23:48.839496 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:48 crc kubenswrapper[4849]: I1203 12:23:48.839530 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:48 crc kubenswrapper[4849]: I1203 12:23:48.839541 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-299jg\" (UniqueName: \"kubernetes.io/projected/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56-kube-api-access-299jg\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.120405 4849 generic.go:334] "Generic (PLEG): container finished" podID="15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" containerID="e350f25548c486e296ffda0678e8f2a31bb80e7088911731d356ca88619bd295" exitCode=0 Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.120440 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xdhfk" event={"ID":"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56","Type":"ContainerDied","Data":"e350f25548c486e296ffda0678e8f2a31bb80e7088911731d356ca88619bd295"} Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.120455 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xdhfk" Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.120466 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xdhfk" event={"ID":"15583ad6-2adf-4151-aa5b-8a2f4ba0bb56","Type":"ContainerDied","Data":"afa846409e87684df06ad8e23e6ecaa3a81ada28e38de07e41ba92f654331c35"} Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.120500 4849 scope.go:117] "RemoveContainer" containerID="e350f25548c486e296ffda0678e8f2a31bb80e7088911731d356ca88619bd295" Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.131404 4849 scope.go:117] "RemoveContainer" containerID="c33074bc98ec0e05edc258a0fae64b9e53e13680a48bce2c077a8c0cc0b9f345" Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.139861 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xdhfk"] Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.140763 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xdhfk"] Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.144361 4849 scope.go:117] "RemoveContainer" containerID="ea3f657639aef007fb8682a93a5ee2ca84ea206c62d95d4f36155c4723f8ca35" Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.164683 4849 scope.go:117] "RemoveContainer" containerID="e350f25548c486e296ffda0678e8f2a31bb80e7088911731d356ca88619bd295" Dec 03 12:23:49 crc kubenswrapper[4849]: E1203 12:23:49.164935 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e350f25548c486e296ffda0678e8f2a31bb80e7088911731d356ca88619bd295\": container with ID starting with e350f25548c486e296ffda0678e8f2a31bb80e7088911731d356ca88619bd295 not found: ID does not exist" containerID="e350f25548c486e296ffda0678e8f2a31bb80e7088911731d356ca88619bd295" Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.164968 4849 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e350f25548c486e296ffda0678e8f2a31bb80e7088911731d356ca88619bd295"} err="failed to get container status \"e350f25548c486e296ffda0678e8f2a31bb80e7088911731d356ca88619bd295\": rpc error: code = NotFound desc = could not find container \"e350f25548c486e296ffda0678e8f2a31bb80e7088911731d356ca88619bd295\": container with ID starting with e350f25548c486e296ffda0678e8f2a31bb80e7088911731d356ca88619bd295 not found: ID does not exist" Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.164990 4849 scope.go:117] "RemoveContainer" containerID="c33074bc98ec0e05edc258a0fae64b9e53e13680a48bce2c077a8c0cc0b9f345" Dec 03 12:23:49 crc kubenswrapper[4849]: E1203 12:23:49.165201 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c33074bc98ec0e05edc258a0fae64b9e53e13680a48bce2c077a8c0cc0b9f345\": container with ID starting with c33074bc98ec0e05edc258a0fae64b9e53e13680a48bce2c077a8c0cc0b9f345 not found: ID does not exist" containerID="c33074bc98ec0e05edc258a0fae64b9e53e13680a48bce2c077a8c0cc0b9f345" Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.165221 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c33074bc98ec0e05edc258a0fae64b9e53e13680a48bce2c077a8c0cc0b9f345"} err="failed to get container status \"c33074bc98ec0e05edc258a0fae64b9e53e13680a48bce2c077a8c0cc0b9f345\": rpc error: code = NotFound desc = could not find container \"c33074bc98ec0e05edc258a0fae64b9e53e13680a48bce2c077a8c0cc0b9f345\": container with ID starting with c33074bc98ec0e05edc258a0fae64b9e53e13680a48bce2c077a8c0cc0b9f345 not found: ID does not exist" Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.165234 4849 scope.go:117] "RemoveContainer" containerID="ea3f657639aef007fb8682a93a5ee2ca84ea206c62d95d4f36155c4723f8ca35" Dec 03 12:23:49 crc kubenswrapper[4849]: E1203 12:23:49.165417 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea3f657639aef007fb8682a93a5ee2ca84ea206c62d95d4f36155c4723f8ca35\": container with ID starting with ea3f657639aef007fb8682a93a5ee2ca84ea206c62d95d4f36155c4723f8ca35 not found: ID does not exist" containerID="ea3f657639aef007fb8682a93a5ee2ca84ea206c62d95d4f36155c4723f8ca35" Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.165442 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea3f657639aef007fb8682a93a5ee2ca84ea206c62d95d4f36155c4723f8ca35"} err="failed to get container status \"ea3f657639aef007fb8682a93a5ee2ca84ea206c62d95d4f36155c4723f8ca35\": rpc error: code = NotFound desc = could not find container \"ea3f657639aef007fb8682a93a5ee2ca84ea206c62d95d4f36155c4723f8ca35\": container with ID starting with ea3f657639aef007fb8682a93a5ee2ca84ea206c62d95d4f36155c4723f8ca35 not found: ID does not exist" Dec 03 12:23:49 crc kubenswrapper[4849]: I1203 12:23:49.860325 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" path="/var/lib/kubelet/pods/15583ad6-2adf-4151-aa5b-8a2f4ba0bb56/volumes" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980075 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980267 4849 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1c85938a-071b-468a-a7d6-7a106913311b" containerName="extract-utilities" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980279 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c85938a-071b-468a-a7d6-7a106913311b" containerName="extract-utilities" Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980288 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" containerName="extract-content" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980293 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" containerName="extract-content" Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980300 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e444288e-49e3-4708-bd0b-9c70b5f09796" containerName="pruner" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980305 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="e444288e-49e3-4708-bd0b-9c70b5f09796" containerName="pruner" Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980311 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="195616d8-386c-4b49-80b8-6a8f1dfd87a6" containerName="extract-utilities" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980316 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="195616d8-386c-4b49-80b8-6a8f1dfd87a6" containerName="extract-utilities" Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980323 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="314e4660-f3be-424e-8635-f4e85fca56be" containerName="registry-server" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980341 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="314e4660-f3be-424e-8635-f4e85fca56be" containerName="registry-server" Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980349 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" containerName="extract-utilities" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980354 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" containerName="extract-utilities" Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980362 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="314e4660-f3be-424e-8635-f4e85fca56be" containerName="extract-utilities" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980367 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="314e4660-f3be-424e-8635-f4e85fca56be" containerName="extract-utilities" Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980374 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="195616d8-386c-4b49-80b8-6a8f1dfd87a6" containerName="registry-server" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980379 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="195616d8-386c-4b49-80b8-6a8f1dfd87a6" containerName="registry-server" Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980390 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="195616d8-386c-4b49-80b8-6a8f1dfd87a6" containerName="extract-content" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980395 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="195616d8-386c-4b49-80b8-6a8f1dfd87a6" containerName="extract-content" Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980403 4849 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="1c85938a-071b-468a-a7d6-7a106913311b" containerName="registry-server" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980409 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c85938a-071b-468a-a7d6-7a106913311b" containerName="registry-server" Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980417 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c85938a-071b-468a-a7d6-7a106913311b" containerName="extract-content" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980422 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c85938a-071b-468a-a7d6-7a106913311b" containerName="extract-content" Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980428 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" containerName="registry-server" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980433 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" containerName="registry-server" Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980441 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2187ef92-2a54-4646-9c49-9fa970e8ae23" containerName="pruner" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980446 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="2187ef92-2a54-4646-9c49-9fa970e8ae23" containerName="pruner" Dec 03 12:23:51 crc kubenswrapper[4849]: E1203 12:23:51.980453 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="314e4660-f3be-424e-8635-f4e85fca56be" containerName="extract-content" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980458 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="314e4660-f3be-424e-8635-f4e85fca56be" containerName="extract-content" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980535 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="314e4660-f3be-424e-8635-f4e85fca56be" containerName="registry-server" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980544 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="e444288e-49e3-4708-bd0b-9c70b5f09796" containerName="pruner" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980550 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="195616d8-386c-4b49-80b8-6a8f1dfd87a6" containerName="registry-server" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980559 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="15583ad6-2adf-4151-aa5b-8a2f4ba0bb56" containerName="registry-server" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980565 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c85938a-071b-468a-a7d6-7a106913311b" containerName="registry-server" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980575 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="2187ef92-2a54-4646-9c49-9fa970e8ae23" containerName="pruner" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.980891 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.984609 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.984865 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 12:23:51 crc kubenswrapper[4849]: I1203 12:23:51.987903 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 12:23:52 crc kubenswrapper[4849]: I1203 12:23:52.169618 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/386a5d91-3415-4517-be37-48f7f5fa5b63-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"386a5d91-3415-4517-be37-48f7f5fa5b63\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:23:52 crc kubenswrapper[4849]: I1203 12:23:52.169758 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/386a5d91-3415-4517-be37-48f7f5fa5b63-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"386a5d91-3415-4517-be37-48f7f5fa5b63\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:23:52 crc kubenswrapper[4849]: I1203 12:23:52.270068 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/386a5d91-3415-4517-be37-48f7f5fa5b63-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"386a5d91-3415-4517-be37-48f7f5fa5b63\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:23:52 crc kubenswrapper[4849]: I1203 12:23:52.270123 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/386a5d91-3415-4517-be37-48f7f5fa5b63-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"386a5d91-3415-4517-be37-48f7f5fa5b63\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:23:52 crc kubenswrapper[4849]: I1203 12:23:52.270194 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/386a5d91-3415-4517-be37-48f7f5fa5b63-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"386a5d91-3415-4517-be37-48f7f5fa5b63\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:23:52 crc kubenswrapper[4849]: I1203 12:23:52.283200 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/386a5d91-3415-4517-be37-48f7f5fa5b63-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"386a5d91-3415-4517-be37-48f7f5fa5b63\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:23:52 crc kubenswrapper[4849]: I1203 12:23:52.293080 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:23:52 crc kubenswrapper[4849]: I1203 12:23:52.625779 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 12:23:52 crc kubenswrapper[4849]: W1203 12:23:52.630055 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod386a5d91_3415_4517_be37_48f7f5fa5b63.slice/crio-8bd06ec00fe3a81c7c323d29797d36c6ee68ac0c730882f65fbc8902dc3e86ec WatchSource:0}: Error finding container 8bd06ec00fe3a81c7c323d29797d36c6ee68ac0c730882f65fbc8902dc3e86ec: Status 404 returned error can't find the container with id 8bd06ec00fe3a81c7c323d29797d36c6ee68ac0c730882f65fbc8902dc3e86ec Dec 03 12:23:52 crc kubenswrapper[4849]: I1203 12:23:52.676887 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:23:52 crc kubenswrapper[4849]: I1203 12:23:52.676929 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:23:53 crc kubenswrapper[4849]: I1203 12:23:53.138913 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"386a5d91-3415-4517-be37-48f7f5fa5b63","Type":"ContainerStarted","Data":"5ff47cc160ab6c2d6ccffa252a6a2cf4b9fa65996646248ecad8bee32ab311bd"} Dec 03 12:23:53 crc kubenswrapper[4849]: I1203 12:23:53.139123 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"386a5d91-3415-4517-be37-48f7f5fa5b63","Type":"ContainerStarted","Data":"8bd06ec00fe3a81c7c323d29797d36c6ee68ac0c730882f65fbc8902dc3e86ec"} Dec 03 12:23:53 crc kubenswrapper[4849]: I1203 12:23:53.152915 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=2.152902804 podStartE2EDuration="2.152902804s" podCreationTimestamp="2025-12-03 12:23:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:53.150947006 +0000 UTC m=+179.612794789" watchObservedRunningTime="2025-12-03 12:23:53.152902804 +0000 UTC m=+179.614750588" Dec 03 12:23:54 crc kubenswrapper[4849]: I1203 12:23:54.144016 4849 generic.go:334] "Generic (PLEG): container finished" podID="386a5d91-3415-4517-be37-48f7f5fa5b63" containerID="5ff47cc160ab6c2d6ccffa252a6a2cf4b9fa65996646248ecad8bee32ab311bd" exitCode=0 Dec 03 12:23:54 crc kubenswrapper[4849]: I1203 12:23:54.144050 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"386a5d91-3415-4517-be37-48f7f5fa5b63","Type":"ContainerDied","Data":"5ff47cc160ab6c2d6ccffa252a6a2cf4b9fa65996646248ecad8bee32ab311bd"} Dec 03 12:23:54 crc kubenswrapper[4849]: I1203 12:23:54.997473 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-f8svg"] Dec 03 12:23:55 crc kubenswrapper[4849]: I1203 12:23:55.457955 4849 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:23:55 crc kubenswrapper[4849]: I1203 12:23:55.604244 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/386a5d91-3415-4517-be37-48f7f5fa5b63-kubelet-dir\") pod \"386a5d91-3415-4517-be37-48f7f5fa5b63\" (UID: \"386a5d91-3415-4517-be37-48f7f5fa5b63\") " Dec 03 12:23:55 crc kubenswrapper[4849]: I1203 12:23:55.604474 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/386a5d91-3415-4517-be37-48f7f5fa5b63-kube-api-access\") pod \"386a5d91-3415-4517-be37-48f7f5fa5b63\" (UID: \"386a5d91-3415-4517-be37-48f7f5fa5b63\") " Dec 03 12:23:55 crc kubenswrapper[4849]: I1203 12:23:55.604362 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/386a5d91-3415-4517-be37-48f7f5fa5b63-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "386a5d91-3415-4517-be37-48f7f5fa5b63" (UID: "386a5d91-3415-4517-be37-48f7f5fa5b63"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:23:55 crc kubenswrapper[4849]: I1203 12:23:55.604758 4849 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/386a5d91-3415-4517-be37-48f7f5fa5b63-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:55 crc kubenswrapper[4849]: I1203 12:23:55.608598 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/386a5d91-3415-4517-be37-48f7f5fa5b63-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "386a5d91-3415-4517-be37-48f7f5fa5b63" (UID: "386a5d91-3415-4517-be37-48f7f5fa5b63"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:23:55 crc kubenswrapper[4849]: I1203 12:23:55.705331 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/386a5d91-3415-4517-be37-48f7f5fa5b63-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:23:56 crc kubenswrapper[4849]: I1203 12:23:56.153946 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"386a5d91-3415-4517-be37-48f7f5fa5b63","Type":"ContainerDied","Data":"8bd06ec00fe3a81c7c323d29797d36c6ee68ac0c730882f65fbc8902dc3e86ec"} Dec 03 12:23:56 crc kubenswrapper[4849]: I1203 12:23:56.153984 4849 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8bd06ec00fe3a81c7c323d29797d36c6ee68ac0c730882f65fbc8902dc3e86ec" Dec 03 12:23:56 crc kubenswrapper[4849]: I1203 12:23:56.153990 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 12:23:57 crc kubenswrapper[4849]: I1203 12:23:57.979199 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 12:23:57 crc kubenswrapper[4849]: E1203 12:23:57.979406 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="386a5d91-3415-4517-be37-48f7f5fa5b63" containerName="pruner" Dec 03 12:23:57 crc kubenswrapper[4849]: I1203 12:23:57.979418 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="386a5d91-3415-4517-be37-48f7f5fa5b63" containerName="pruner" Dec 03 12:23:57 crc kubenswrapper[4849]: I1203 12:23:57.979506 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="386a5d91-3415-4517-be37-48f7f5fa5b63" containerName="pruner" Dec 03 12:23:57 crc kubenswrapper[4849]: I1203 12:23:57.979832 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:23:57 crc kubenswrapper[4849]: I1203 12:23:57.982023 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 12:23:57 crc kubenswrapper[4849]: I1203 12:23:57.982578 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 12:23:57 crc kubenswrapper[4849]: I1203 12:23:57.987391 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 12:23:58 crc kubenswrapper[4849]: I1203 12:23:58.133105 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5ba61539-0f8b-49bd-b7e3-623e58932769-var-lock\") pod \"installer-9-crc\" (UID: \"5ba61539-0f8b-49bd-b7e3-623e58932769\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:23:58 crc kubenswrapper[4849]: I1203 12:23:58.133204 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5ba61539-0f8b-49bd-b7e3-623e58932769-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5ba61539-0f8b-49bd-b7e3-623e58932769\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:23:58 crc kubenswrapper[4849]: I1203 12:23:58.133225 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5ba61539-0f8b-49bd-b7e3-623e58932769-kube-api-access\") pod \"installer-9-crc\" (UID: \"5ba61539-0f8b-49bd-b7e3-623e58932769\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:23:58 crc kubenswrapper[4849]: I1203 12:23:58.234034 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5ba61539-0f8b-49bd-b7e3-623e58932769-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5ba61539-0f8b-49bd-b7e3-623e58932769\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:23:58 crc kubenswrapper[4849]: I1203 12:23:58.234131 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5ba61539-0f8b-49bd-b7e3-623e58932769-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5ba61539-0f8b-49bd-b7e3-623e58932769\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:23:58 crc kubenswrapper[4849]: I1203 12:23:58.234340 4849 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5ba61539-0f8b-49bd-b7e3-623e58932769-kube-api-access\") pod \"installer-9-crc\" (UID: \"5ba61539-0f8b-49bd-b7e3-623e58932769\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:23:58 crc kubenswrapper[4849]: I1203 12:23:58.234551 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5ba61539-0f8b-49bd-b7e3-623e58932769-var-lock\") pod \"installer-9-crc\" (UID: \"5ba61539-0f8b-49bd-b7e3-623e58932769\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:23:58 crc kubenswrapper[4849]: I1203 12:23:58.234596 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5ba61539-0f8b-49bd-b7e3-623e58932769-var-lock\") pod \"installer-9-crc\" (UID: \"5ba61539-0f8b-49bd-b7e3-623e58932769\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:23:58 crc kubenswrapper[4849]: I1203 12:23:58.247885 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5ba61539-0f8b-49bd-b7e3-623e58932769-kube-api-access\") pod \"installer-9-crc\" (UID: \"5ba61539-0f8b-49bd-b7e3-623e58932769\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:23:58 crc kubenswrapper[4849]: I1203 12:23:58.289884 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:23:58 crc kubenswrapper[4849]: I1203 12:23:58.619738 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 12:23:58 crc kubenswrapper[4849]: W1203 12:23:58.620746 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod5ba61539_0f8b_49bd_b7e3_623e58932769.slice/crio-964df2fbc240ddb4fcb000541550f59e5b4902277de2e07097b4cb16d1d907e5 WatchSource:0}: Error finding container 964df2fbc240ddb4fcb000541550f59e5b4902277de2e07097b4cb16d1d907e5: Status 404 returned error can't find the container with id 964df2fbc240ddb4fcb000541550f59e5b4902277de2e07097b4cb16d1d907e5 Dec 03 12:23:59 crc kubenswrapper[4849]: I1203 12:23:59.166582 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5ba61539-0f8b-49bd-b7e3-623e58932769","Type":"ContainerStarted","Data":"09a5ecdfe8ffde407d57f5361d3ef8ea641215dbf06c7bd7e1e6b831426a199b"} Dec 03 12:23:59 crc kubenswrapper[4849]: I1203 12:23:59.166801 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5ba61539-0f8b-49bd-b7e3-623e58932769","Type":"ContainerStarted","Data":"964df2fbc240ddb4fcb000541550f59e5b4902277de2e07097b4cb16d1d907e5"} Dec 03 12:23:59 crc kubenswrapper[4849]: I1203 12:23:59.176676 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.176660838 podStartE2EDuration="2.176660838s" podCreationTimestamp="2025-12-03 12:23:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:23:59.175924642 +0000 UTC m=+185.637772425" watchObservedRunningTime="2025-12-03 12:23:59.176660838 +0000 UTC m=+185.638508620" Dec 03 12:23:59 crc kubenswrapper[4849]: I1203 12:23:59.976749 4849 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.018680 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" podUID="8478b9f3-643c-490d-8f8b-663e19230dc2" containerName="oauth-openshift" containerID="cri-o://30ca7adfa0d56bcd9586142249f4e058b81c3d1589c8e27b091b09ddaf9eddd7" gracePeriod=15 Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.243215 4849 generic.go:334] "Generic (PLEG): container finished" podID="8478b9f3-643c-490d-8f8b-663e19230dc2" containerID="30ca7adfa0d56bcd9586142249f4e058b81c3d1589c8e27b091b09ddaf9eddd7" exitCode=0 Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.243254 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" event={"ID":"8478b9f3-643c-490d-8f8b-663e19230dc2","Type":"ContainerDied","Data":"30ca7adfa0d56bcd9586142249f4e058b81c3d1589c8e27b091b09ddaf9eddd7"} Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.301095 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.320471 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6c495d4986-z69z6"] Dec 03 12:24:20 crc kubenswrapper[4849]: E1203 12:24:20.320633 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8478b9f3-643c-490d-8f8b-663e19230dc2" containerName="oauth-openshift" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.320659 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="8478b9f3-643c-490d-8f8b-663e19230dc2" containerName="oauth-openshift" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.320740 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="8478b9f3-643c-490d-8f8b-663e19230dc2" containerName="oauth-openshift" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.321016 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.331948 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6c495d4986-z69z6"] Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.450692 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-session\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.450733 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-login\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.450762 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-provider-selection\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.450785 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-audit-policies\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.450807 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8478b9f3-643c-490d-8f8b-663e19230dc2-audit-dir\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.450843 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldtj9\" (UniqueName: \"kubernetes.io/projected/8478b9f3-643c-490d-8f8b-663e19230dc2-kube-api-access-ldtj9\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.450862 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-trusted-ca-bundle\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.450881 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-service-ca\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.450908 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-idp-0-file-data\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.450926 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-router-certs\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.450946 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-cliconfig\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.451965 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-ocp-branding-template\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.452039 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8478b9f3-643c-490d-8f8b-663e19230dc2-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.452053 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-error\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.452164 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-serving-cert\") pod \"8478b9f3-643c-490d-8f8b-663e19230dc2\" (UID: \"8478b9f3-643c-490d-8f8b-663e19230dc2\") " Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.452318 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.453157 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.454401 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.454473 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.455465 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-session\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.455616 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.455663 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-service-ca\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.455699 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fxsb\" (UniqueName: \"kubernetes.io/projected/d3641db9-129e-4efb-a2de-d275988f7019-kube-api-access-8fxsb\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.455736 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.455758 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.455779 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-user-template-error\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.455775 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.455797 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.455823 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-user-template-login\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.455976 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.456024 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d3641db9-129e-4efb-a2de-d275988f7019-audit-policies\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.456070 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc 
kubenswrapper[4849]: I1203 12:24:20.456114 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-router-certs\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.456151 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d3641db9-129e-4efb-a2de-d275988f7019-audit-dir\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.456282 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.456302 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.456312 4849 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.456324 4849 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8478b9f3-643c-490d-8f8b-663e19230dc2-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.456338 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.456349 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.457148 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.457471 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.458814 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8478b9f3-643c-490d-8f8b-663e19230dc2-kube-api-access-ldtj9" (OuterVolumeSpecName: "kube-api-access-ldtj9") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "kube-api-access-ldtj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.459128 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.459407 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.459948 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.460101 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.460360 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "8478b9f3-643c-490d-8f8b-663e19230dc2" (UID: "8478b9f3-643c-490d-8f8b-663e19230dc2"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.556613 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fxsb\" (UniqueName: \"kubernetes.io/projected/d3641db9-129e-4efb-a2de-d275988f7019-kube-api-access-8fxsb\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.556671 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.556691 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.556708 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.556723 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-user-template-error\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.556741 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-user-template-login\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.556765 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.556783 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d3641db9-129e-4efb-a2de-d275988f7019-audit-policies\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " 
pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.557629 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d3641db9-129e-4efb-a2de-d275988f7019-audit-policies\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.557822 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.557870 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558243 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-router-certs\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558309 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d3641db9-129e-4efb-a2de-d275988f7019-audit-dir\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558333 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-session\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558371 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558390 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-service-ca\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 
12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558437 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldtj9\" (UniqueName: \"kubernetes.io/projected/8478b9f3-643c-490d-8f8b-663e19230dc2-kube-api-access-ldtj9\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558449 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558458 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558468 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558477 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558485 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558501 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558510 4849 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8478b9f3-643c-490d-8f8b-663e19230dc2-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.558663 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d3641db9-129e-4efb-a2de-d275988f7019-audit-dir\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.559392 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.559413 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6c495d4986-z69z6\" 
(UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.559467 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-service-ca\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.559831 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.560070 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.560311 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-user-template-error\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.560667 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-user-template-login\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.560955 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-session\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.561187 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.561339 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d3641db9-129e-4efb-a2de-d275988f7019-v4-0-config-system-router-certs\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: 
\"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.568425 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fxsb\" (UniqueName: \"kubernetes.io/projected/d3641db9-129e-4efb-a2de-d275988f7019-kube-api-access-8fxsb\") pod \"oauth-openshift-6c495d4986-z69z6\" (UID: \"d3641db9-129e-4efb-a2de-d275988f7019\") " pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.631365 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:20 crc kubenswrapper[4849]: I1203 12:24:20.947660 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6c495d4986-z69z6"] Dec 03 12:24:21 crc kubenswrapper[4849]: I1203 12:24:21.247804 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" event={"ID":"d3641db9-129e-4efb-a2de-d275988f7019","Type":"ContainerStarted","Data":"30985320f8a58025da58d8c0b1be4dea16000f379271fd841d2cb7a81ca88e0d"} Dec 03 12:24:21 crc kubenswrapper[4849]: I1203 12:24:21.247975 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" event={"ID":"d3641db9-129e-4efb-a2de-d275988f7019","Type":"ContainerStarted","Data":"0446aa2a3037f189ff197b2c4dbff62cd600369d4253c0a2a647af0d09c42543"} Dec 03 12:24:21 crc kubenswrapper[4849]: I1203 12:24:21.247989 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:21 crc kubenswrapper[4849]: I1203 12:24:21.248931 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" event={"ID":"8478b9f3-643c-490d-8f8b-663e19230dc2","Type":"ContainerDied","Data":"8d6aa84b3ded8ae691d13895e3fd6d2653465d6db4c8ce4804118c2c2392efd4"} Dec 03 12:24:21 crc kubenswrapper[4849]: I1203 12:24:21.248961 4849 scope.go:117] "RemoveContainer" containerID="30ca7adfa0d56bcd9586142249f4e058b81c3d1589c8e27b091b09ddaf9eddd7" Dec 03 12:24:21 crc kubenswrapper[4849]: I1203 12:24:21.249063 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-f8svg" Dec 03 12:24:21 crc kubenswrapper[4849]: I1203 12:24:21.263262 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" podStartSLOduration=26.263248554 podStartE2EDuration="26.263248554s" podCreationTimestamp="2025-12-03 12:23:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:24:21.262110434 +0000 UTC m=+207.723958217" watchObservedRunningTime="2025-12-03 12:24:21.263248554 +0000 UTC m=+207.725096337" Dec 03 12:24:21 crc kubenswrapper[4849]: I1203 12:24:21.272941 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-f8svg"] Dec 03 12:24:21 crc kubenswrapper[4849]: I1203 12:24:21.276060 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-f8svg"] Dec 03 12:24:21 crc kubenswrapper[4849]: I1203 12:24:21.391853 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6c495d4986-z69z6" Dec 03 12:24:21 crc kubenswrapper[4849]: I1203 12:24:21.861126 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8478b9f3-643c-490d-8f8b-663e19230dc2" path="/var/lib/kubelet/pods/8478b9f3-643c-490d-8f8b-663e19230dc2/volumes" Dec 03 12:24:22 crc kubenswrapper[4849]: I1203 12:24:22.676979 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:24:22 crc kubenswrapper[4849]: I1203 12:24:22.677151 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:24:22 crc kubenswrapper[4849]: I1203 12:24:22.677202 4849 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:24:22 crc kubenswrapper[4849]: I1203 12:24:22.677699 4849 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be"} pod="openshift-machine-config-operator/machine-config-daemon-hszbg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:24:22 crc kubenswrapper[4849]: I1203 12:24:22.677744 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" containerID="cri-o://79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be" gracePeriod=600 Dec 03 12:24:23 crc kubenswrapper[4849]: I1203 12:24:23.259591 4849 generic.go:334] "Generic (PLEG): container finished" podID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerID="79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be" exitCode=0 Dec 03 12:24:23 crc 
kubenswrapper[4849]: I1203 12:24:23.259681 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerDied","Data":"79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be"} Dec 03 12:24:23 crc kubenswrapper[4849]: I1203 12:24:23.259722 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerStarted","Data":"17ec90961f5f8754905eb5b0226119e23c94a47d88996ec1a2c526a0bafcd1b4"} Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.204487 4849 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.205372 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.205429 4849 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.205674 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf" gracePeriod=15 Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.206027 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a" gracePeriod=15 Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.206081 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3" gracePeriod=15 Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.206129 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3" gracePeriod=15 Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.206118 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4" gracePeriod=15 Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.206674 4849 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 12:24:36 crc kubenswrapper[4849]: E1203 12:24:36.206844 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.206854 4849 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 03 12:24:36 crc kubenswrapper[4849]: E1203 12:24:36.206862 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.206867 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 12:24:36 crc kubenswrapper[4849]: E1203 12:24:36.206878 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.206884 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 12:24:36 crc kubenswrapper[4849]: E1203 12:24:36.206894 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.206899 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 12:24:36 crc kubenswrapper[4849]: E1203 12:24:36.206906 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.206911 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 12:24:36 crc kubenswrapper[4849]: E1203 12:24:36.206919 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.206925 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 12:24:36 crc kubenswrapper[4849]: E1203 12:24:36.206932 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.206937 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.207016 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.207027 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.207035 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.207042 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.207050 4849 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.207057 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.212995 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.213539 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.213628 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.213837 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.213929 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.213958 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.214002 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.214046 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.315541 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.315770 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.315796 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.315670 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.315817 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.315847 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.315866 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.315881 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.315882 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:36 crc 
kubenswrapper[4849]: I1203 12:24:36.315909 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.315895 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.315925 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.316012 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.316040 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.316045 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:36 crc kubenswrapper[4849]: I1203 12:24:36.316107 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:37 crc kubenswrapper[4849]: I1203 12:24:37.313607 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 12:24:37 crc kubenswrapper[4849]: I1203 12:24:37.314511 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 12:24:37 crc kubenswrapper[4849]: I1203 12:24:37.315151 4849 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a" exitCode=0 Dec 03 12:24:37 crc kubenswrapper[4849]: I1203 12:24:37.315184 4849 generic.go:334] "Generic (PLEG): container finished" 
podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3" exitCode=0 Dec 03 12:24:37 crc kubenswrapper[4849]: I1203 12:24:37.315193 4849 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3" exitCode=0 Dec 03 12:24:37 crc kubenswrapper[4849]: I1203 12:24:37.315202 4849 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4" exitCode=2 Dec 03 12:24:37 crc kubenswrapper[4849]: I1203 12:24:37.315233 4849 scope.go:117] "RemoveContainer" containerID="3cddac9e2e5606b07f186615810178bae7d070997be82104cd127cf5711384b0" Dec 03 12:24:37 crc kubenswrapper[4849]: I1203 12:24:37.320938 4849 generic.go:334] "Generic (PLEG): container finished" podID="5ba61539-0f8b-49bd-b7e3-623e58932769" containerID="09a5ecdfe8ffde407d57f5361d3ef8ea641215dbf06c7bd7e1e6b831426a199b" exitCode=0 Dec 03 12:24:37 crc kubenswrapper[4849]: I1203 12:24:37.320966 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5ba61539-0f8b-49bd-b7e3-623e58932769","Type":"ContainerDied","Data":"09a5ecdfe8ffde407d57f5361d3ef8ea641215dbf06c7bd7e1e6b831426a199b"} Dec 03 12:24:37 crc kubenswrapper[4849]: I1203 12:24:37.321448 4849 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:37 crc kubenswrapper[4849]: I1203 12:24:37.321691 4849 status_manager.go:851] "Failed to get status for pod" podUID="5ba61539-0f8b-49bd-b7e3-623e58932769" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:38 crc kubenswrapper[4849]: E1203 12:24:38.090426 4849 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:38 crc kubenswrapper[4849]: E1203 12:24:38.090879 4849 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:38 crc kubenswrapper[4849]: E1203 12:24:38.091305 4849 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:38 crc kubenswrapper[4849]: E1203 12:24:38.091751 4849 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:38 crc kubenswrapper[4849]: E1203 12:24:38.092029 4849 controller.go:195] "Failed to update lease" err="Put 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.092066 4849 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 03 12:24:38 crc kubenswrapper[4849]: E1203 12:24:38.092274 4849 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" interval="200ms" Dec 03 12:24:38 crc kubenswrapper[4849]: E1203 12:24:38.292730 4849 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" interval="400ms" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.328112 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.446999 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.447622 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.448284 4849 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.448533 4849 status_manager.go:851] "Failed to get status for pod" podUID="5ba61539-0f8b-49bd-b7e3-623e58932769" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.499910 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.500215 4849 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.500483 4849 status_manager.go:851] "Failed to get status for pod" podUID="5ba61539-0f8b-49bd-b7e3-623e58932769" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636370 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5ba61539-0f8b-49bd-b7e3-623e58932769-kubelet-dir\") pod \"5ba61539-0f8b-49bd-b7e3-623e58932769\" (UID: \"5ba61539-0f8b-49bd-b7e3-623e58932769\") " Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636422 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5ba61539-0f8b-49bd-b7e3-623e58932769-kube-api-access\") pod \"5ba61539-0f8b-49bd-b7e3-623e58932769\" (UID: \"5ba61539-0f8b-49bd-b7e3-623e58932769\") " Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636475 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636495 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5ba61539-0f8b-49bd-b7e3-623e58932769-var-lock\") pod \"5ba61539-0f8b-49bd-b7e3-623e58932769\" (UID: \"5ba61539-0f8b-49bd-b7e3-623e58932769\") " Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636495 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ba61539-0f8b-49bd-b7e3-623e58932769-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5ba61539-0f8b-49bd-b7e3-623e58932769" (UID: "5ba61539-0f8b-49bd-b7e3-623e58932769"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636515 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636536 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636542 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ba61539-0f8b-49bd-b7e3-623e58932769-var-lock" (OuterVolumeSpecName: "var-lock") pod "5ba61539-0f8b-49bd-b7e3-623e58932769" (UID: "5ba61539-0f8b-49bd-b7e3-623e58932769"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636567 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636562 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636602 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636810 4849 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636824 4849 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5ba61539-0f8b-49bd-b7e3-623e58932769-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636833 4849 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636840 4849 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5ba61539-0f8b-49bd-b7e3-623e58932769-var-lock\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.636847 4849 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.640679 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ba61539-0f8b-49bd-b7e3-623e58932769-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5ba61539-0f8b-49bd-b7e3-623e58932769" (UID: "5ba61539-0f8b-49bd-b7e3-623e58932769"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:24:38 crc kubenswrapper[4849]: E1203 12:24:38.693528 4849 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" interval="800ms" Dec 03 12:24:38 crc kubenswrapper[4849]: I1203 12:24:38.737948 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5ba61539-0f8b-49bd-b7e3-623e58932769-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.335480 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.336031 4849 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf" exitCode=0 Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.336082 4849 scope.go:117] "RemoveContainer" containerID="ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.336130 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.337603 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5ba61539-0f8b-49bd-b7e3-623e58932769","Type":"ContainerDied","Data":"964df2fbc240ddb4fcb000541550f59e5b4902277de2e07097b4cb16d1d907e5"} Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.337625 4849 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="964df2fbc240ddb4fcb000541550f59e5b4902277de2e07097b4cb16d1d907e5" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.337661 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.347655 4849 status_manager.go:851] "Failed to get status for pod" podUID="5ba61539-0f8b-49bd-b7e3-623e58932769" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.348007 4849 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.348067 4849 scope.go:117] "RemoveContainer" containerID="fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.348281 4849 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.348475 4849 status_manager.go:851] "Failed to get status for pod" podUID="5ba61539-0f8b-49bd-b7e3-623e58932769" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.356625 4849 scope.go:117] "RemoveContainer" containerID="fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.366277 4849 scope.go:117] "RemoveContainer" containerID="4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.374955 4849 scope.go:117] "RemoveContainer" containerID="d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.387680 4849 scope.go:117] "RemoveContainer" containerID="772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.401398 4849 scope.go:117] "RemoveContainer" containerID="ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a" Dec 03 12:24:39 crc kubenswrapper[4849]: E1203 12:24:39.401966 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\": container with ID starting with ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a not found: ID does not exist" containerID="ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.402060 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a"} err="failed to get container status \"ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\": rpc error: code = NotFound desc = could not find container \"ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a\": container with ID starting with ed1a1b21586291c95c018e489f58160472a688c7ae9cc0e09b9a4d4390db163a not found: ID does not exist" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.402696 4849 scope.go:117] "RemoveContainer" containerID="fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3" Dec 03 12:24:39 crc kubenswrapper[4849]: E1203 12:24:39.403787 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\": container with ID starting with fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3 not found: ID does not exist" containerID="fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.403814 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3"} err="failed to get container status \"fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\": rpc error: code = NotFound desc = could not find container \"fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3\": container with ID starting with fbd9a898eafd24343d9dcec37e026087dfe56a4fedf060df9e893f6fd3aa68e3 not found: ID does not exist" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.403830 4849 scope.go:117] "RemoveContainer" containerID="fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3" Dec 03 12:24:39 crc kubenswrapper[4849]: E1203 12:24:39.404061 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\": container with ID starting with fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3 not found: ID does not exist" containerID="fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.404080 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3"} err="failed to get container status \"fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\": rpc error: code = NotFound desc = could not find container \"fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3\": container with ID starting with fc41a32ef5a1b7c1bf12d74430e8aec2fc1463f431e9777c531d634d067476d3 not found: ID does not exist" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.404093 4849 scope.go:117] "RemoveContainer" 
containerID="4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4" Dec 03 12:24:39 crc kubenswrapper[4849]: E1203 12:24:39.404360 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\": container with ID starting with 4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4 not found: ID does not exist" containerID="4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.404381 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4"} err="failed to get container status \"4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\": rpc error: code = NotFound desc = could not find container \"4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4\": container with ID starting with 4a6c808ad82f3e73622bc29ee5c77b08129a3bb08ae94f81934749de1741b9f4 not found: ID does not exist" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.404395 4849 scope.go:117] "RemoveContainer" containerID="d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf" Dec 03 12:24:39 crc kubenswrapper[4849]: E1203 12:24:39.404585 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\": container with ID starting with d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf not found: ID does not exist" containerID="d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.404606 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf"} err="failed to get container status \"d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\": rpc error: code = NotFound desc = could not find container \"d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf\": container with ID starting with d37927caf0059664a32bda44fb8feed0505fdefa6ca8a2de3c11eaa2210562bf not found: ID does not exist" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.404618 4849 scope.go:117] "RemoveContainer" containerID="772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502" Dec 03 12:24:39 crc kubenswrapper[4849]: E1203 12:24:39.404863 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\": container with ID starting with 772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502 not found: ID does not exist" containerID="772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.404897 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502"} err="failed to get container status \"772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\": rpc error: code = NotFound desc = could not find container \"772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502\": container with ID starting with 
772f7131bc8c2c36d5f814f74cdf111a8b91f46c007bad9098196c190b2eb502 not found: ID does not exist" Dec 03 12:24:39 crc kubenswrapper[4849]: E1203 12:24:39.494000 4849 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" interval="1.6s" Dec 03 12:24:39 crc kubenswrapper[4849]: I1203 12:24:39.861246 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 03 12:24:41 crc kubenswrapper[4849]: E1203 12:24:41.095256 4849 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" interval="3.2s" Dec 03 12:24:41 crc kubenswrapper[4849]: E1203 12:24:41.226214 4849 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.25.198:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:41 crc kubenswrapper[4849]: I1203 12:24:41.226465 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:41 crc kubenswrapper[4849]: E1203 12:24:41.242500 4849 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.25.198:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187db420a197d8e2 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 12:24:41.24213885 +0000 UTC m=+227.703986634,LastTimestamp:2025-12-03 12:24:41.24213885 +0000 UTC m=+227.703986634,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 12:24:41 crc kubenswrapper[4849]: I1203 12:24:41.347300 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"1d0834a9afa54d479ed2c63382140aca9c364278c9c8a7cf2558fa5ed56ba724"} Dec 03 12:24:42 crc kubenswrapper[4849]: I1203 12:24:42.351336 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"f94444fc18a50f600cae76767abc4fb556d2afb6267a086d677d49295f0e2f46"} Dec 03 12:24:42 crc kubenswrapper[4849]: E1203 12:24:42.351772 4849 kubelet.go:1929] "Failed creating a mirror pod for" err="Post 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.25.198:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:42 crc kubenswrapper[4849]: I1203 12:24:42.351779 4849 status_manager.go:851] "Failed to get status for pod" podUID="5ba61539-0f8b-49bd-b7e3-623e58932769" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:43 crc kubenswrapper[4849]: E1203 12:24:43.354887 4849 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.25.198:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:24:43 crc kubenswrapper[4849]: I1203 12:24:43.858848 4849 status_manager.go:851] "Failed to get status for pod" podUID="5ba61539-0f8b-49bd-b7e3-623e58932769" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:44 crc kubenswrapper[4849]: E1203 12:24:44.295990 4849 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.198:6443: connect: connection refused" interval="6.4s" Dec 03 12:24:44 crc kubenswrapper[4849]: E1203 12:24:44.853935 4849 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.25.198:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187db420a197d8e2 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 12:24:41.24213885 +0000 UTC m=+227.703986634,LastTimestamp:2025-12-03 12:24:41.24213885 +0000 UTC m=+227.703986634,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 12:24:46 crc kubenswrapper[4849]: I1203 12:24:46.855901 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:46 crc kubenswrapper[4849]: I1203 12:24:46.856540 4849 status_manager.go:851] "Failed to get status for pod" podUID="5ba61539-0f8b-49bd-b7e3-623e58932769" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:46 crc kubenswrapper[4849]: I1203 12:24:46.866707 4849 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="11092d44-9365-49cc-a1f7-74b1d12d1750" Dec 03 12:24:46 crc kubenswrapper[4849]: I1203 12:24:46.866739 4849 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="11092d44-9365-49cc-a1f7-74b1d12d1750" Dec 03 12:24:46 crc kubenswrapper[4849]: E1203 12:24:46.867060 4849 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:46 crc kubenswrapper[4849]: I1203 12:24:46.867325 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:47 crc kubenswrapper[4849]: I1203 12:24:47.370838 4849 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="1a674f7e92535b51b1640a5b5a9478006101223e6fcff992dca41edf4328ebdf" exitCode=0 Dec 03 12:24:47 crc kubenswrapper[4849]: I1203 12:24:47.370920 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"1a674f7e92535b51b1640a5b5a9478006101223e6fcff992dca41edf4328ebdf"} Dec 03 12:24:47 crc kubenswrapper[4849]: I1203 12:24:47.371030 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c531492edd2798ff2d3da5411a46d346c8a1ac11118c2ca4468a936157edcc25"} Dec 03 12:24:47 crc kubenswrapper[4849]: I1203 12:24:47.371239 4849 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="11092d44-9365-49cc-a1f7-74b1d12d1750" Dec 03 12:24:47 crc kubenswrapper[4849]: I1203 12:24:47.371251 4849 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="11092d44-9365-49cc-a1f7-74b1d12d1750" Dec 03 12:24:47 crc kubenswrapper[4849]: I1203 12:24:47.371482 4849 status_manager.go:851] "Failed to get status for pod" podUID="5ba61539-0f8b-49bd-b7e3-623e58932769" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" Dec 03 12:24:47 crc kubenswrapper[4849]: E1203 12:24:47.371540 4849 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.198:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:48 crc kubenswrapper[4849]: I1203 12:24:48.378211 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"845af2610d2639feaada894af0cc4f9299a3d5bc1b8f9b5dbcd80ab2dc912418"} Dec 03 12:24:48 crc kubenswrapper[4849]: I1203 12:24:48.378961 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9ba29ed316f5405ee6d27b37b618d11a823142022e219e8170ca725fd724e03d"} Dec 03 12:24:48 crc kubenswrapper[4849]: I1203 12:24:48.378999 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7eb7fbbbfdc5023ac7170d6fd3450df7afc6e94b6e07522f4b20eed803e97ea3"} Dec 03 12:24:48 crc kubenswrapper[4849]: I1203 12:24:48.379008 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"572a9850f76548b0c97e5ff036fd3ba780dc51c417da322d6944ffcfe08e3c47"} Dec 03 12:24:48 crc kubenswrapper[4849]: I1203 12:24:48.379018 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2ab49048b31edd956396a9b6e00776969712ecc90f06b797d88c41087d89da60"} Dec 03 12:24:48 crc kubenswrapper[4849]: I1203 12:24:48.379203 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:48 crc kubenswrapper[4849]: I1203 12:24:48.379292 4849 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="11092d44-9365-49cc-a1f7-74b1d12d1750" Dec 03 12:24:48 crc kubenswrapper[4849]: I1203 12:24:48.379311 4849 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="11092d44-9365-49cc-a1f7-74b1d12d1750" Dec 03 12:24:51 crc kubenswrapper[4849]: I1203 12:24:51.868210 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:51 crc kubenswrapper[4849]: I1203 12:24:51.868418 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:51 crc kubenswrapper[4849]: I1203 12:24:51.872048 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:53 crc kubenswrapper[4849]: I1203 12:24:53.582666 4849 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:53 crc kubenswrapper[4849]: I1203 12:24:53.866287 4849 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="89f822e8-6417-4b11-8556-5dfff5b4b272" Dec 03 12:24:54 crc kubenswrapper[4849]: I1203 12:24:54.399663 4849 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="11092d44-9365-49cc-a1f7-74b1d12d1750" Dec 03 12:24:54 crc kubenswrapper[4849]: I1203 12:24:54.399690 4849 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="11092d44-9365-49cc-a1f7-74b1d12d1750" Dec 03 12:24:54 crc kubenswrapper[4849]: I1203 
12:24:54.401573 4849 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="89f822e8-6417-4b11-8556-5dfff5b4b272" Dec 03 12:24:54 crc kubenswrapper[4849]: I1203 12:24:54.403988 4849 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://2ab49048b31edd956396a9b6e00776969712ecc90f06b797d88c41087d89da60" Dec 03 12:24:54 crc kubenswrapper[4849]: I1203 12:24:54.404010 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:24:55 crc kubenswrapper[4849]: I1203 12:24:55.404323 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 12:24:55 crc kubenswrapper[4849]: I1203 12:24:55.404508 4849 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242" exitCode=1 Dec 03 12:24:55 crc kubenswrapper[4849]: I1203 12:24:55.404722 4849 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="11092d44-9365-49cc-a1f7-74b1d12d1750" Dec 03 12:24:55 crc kubenswrapper[4849]: I1203 12:24:55.404733 4849 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="11092d44-9365-49cc-a1f7-74b1d12d1750" Dec 03 12:24:55 crc kubenswrapper[4849]: I1203 12:24:55.404793 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242"} Dec 03 12:24:55 crc kubenswrapper[4849]: I1203 12:24:55.405236 4849 scope.go:117] "RemoveContainer" containerID="bc812eb65f91e8486a1e3f85a6952732d34a98e6357e9c701cb0ef1427f07242" Dec 03 12:24:55 crc kubenswrapper[4849]: I1203 12:24:55.407884 4849 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="89f822e8-6417-4b11-8556-5dfff5b4b272" Dec 03 12:24:55 crc kubenswrapper[4849]: I1203 12:24:55.915047 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:24:56 crc kubenswrapper[4849]: I1203 12:24:56.411545 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 12:24:56 crc kubenswrapper[4849]: I1203 12:24:56.411600 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d7c94bfa00a022b60523cdb09d0ec948b2d62d180bf2f8dee93d75ed14fd7bed"} Dec 03 12:24:59 crc kubenswrapper[4849]: I1203 12:24:59.845794 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:25:00 crc kubenswrapper[4849]: I1203 12:25:00.389932 4849 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 12:25:00 crc kubenswrapper[4849]: I1203 12:25:00.520443 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 12:25:00 crc kubenswrapper[4849]: I1203 12:25:00.524886 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.058582 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.157725 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.305161 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.369660 4849 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.372823 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.372896 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.376011 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.386705 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=8.386690904 podStartE2EDuration="8.386690904s" podCreationTimestamp="2025-12-03 12:24:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:25:01.383904578 +0000 UTC m=+247.845752362" watchObservedRunningTime="2025-12-03 12:25:01.386690904 +0000 UTC m=+247.848538677" Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.402545 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.561910 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.604306 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.740180 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.780819 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 12:25:01 crc kubenswrapper[4849]: I1203 12:25:01.873099 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 12:25:02 crc kubenswrapper[4849]: I1203 12:25:02.215879 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 12:25:02 crc 
kubenswrapper[4849]: I1203 12:25:02.258531 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 12:25:02 crc kubenswrapper[4849]: I1203 12:25:02.283918 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 12:25:02 crc kubenswrapper[4849]: I1203 12:25:02.316921 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 12:25:02 crc kubenswrapper[4849]: I1203 12:25:02.415258 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 03 12:25:02 crc kubenswrapper[4849]: I1203 12:25:02.453285 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 12:25:02 crc kubenswrapper[4849]: I1203 12:25:02.497413 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 03 12:25:02 crc kubenswrapper[4849]: I1203 12:25:02.792218 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 12:25:02 crc kubenswrapper[4849]: I1203 12:25:02.959940 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 12:25:02 crc kubenswrapper[4849]: I1203 12:25:02.969963 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.021425 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.065240 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.220675 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.317066 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.489951 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.509471 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.582535 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.611060 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.654887 4849 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.670745 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 03 12:25:03 
crc kubenswrapper[4849]: I1203 12:25:03.672314 4849 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.672492 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://f94444fc18a50f600cae76767abc4fb556d2afb6267a086d677d49295f0e2f46" gracePeriod=5 Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.676036 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.709870 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 12:25:03 crc kubenswrapper[4849]: I1203 12:25:03.729840 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 12:25:04 crc kubenswrapper[4849]: I1203 12:25:04.078181 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 12:25:04 crc kubenswrapper[4849]: I1203 12:25:04.291596 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 12:25:04 crc kubenswrapper[4849]: I1203 12:25:04.346011 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 03 12:25:04 crc kubenswrapper[4849]: I1203 12:25:04.414417 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 03 12:25:04 crc kubenswrapper[4849]: I1203 12:25:04.548222 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 12:25:04 crc kubenswrapper[4849]: I1203 12:25:04.704252 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 03 12:25:04 crc kubenswrapper[4849]: I1203 12:25:04.790333 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 12:25:05 crc kubenswrapper[4849]: I1203 12:25:05.154532 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 12:25:05 crc kubenswrapper[4849]: I1203 12:25:05.183860 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 12:25:05 crc kubenswrapper[4849]: I1203 12:25:05.203309 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 12:25:05 crc kubenswrapper[4849]: I1203 12:25:05.256299 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 12:25:05 crc kubenswrapper[4849]: I1203 12:25:05.620295 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 12:25:05 crc kubenswrapper[4849]: I1203 12:25:05.630530 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 03 
12:25:05 crc kubenswrapper[4849]: I1203 12:25:05.745787 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 03 12:25:05 crc kubenswrapper[4849]: I1203 12:25:05.888467 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 12:25:05 crc kubenswrapper[4849]: I1203 12:25:05.914990 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:25:05 crc kubenswrapper[4849]: I1203 12:25:05.918046 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:25:05 crc kubenswrapper[4849]: I1203 12:25:05.938353 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 12:25:05 crc kubenswrapper[4849]: I1203 12:25:05.956709 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 12:25:05 crc kubenswrapper[4849]: I1203 12:25:05.987749 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 03 12:25:06 crc kubenswrapper[4849]: I1203 12:25:06.128609 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 12:25:06 crc kubenswrapper[4849]: I1203 12:25:06.299165 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 03 12:25:06 crc kubenswrapper[4849]: I1203 12:25:06.449625 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 12:25:07 crc kubenswrapper[4849]: I1203 12:25:07.162946 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 12:25:07 crc kubenswrapper[4849]: I1203 12:25:07.337442 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 12:25:07 crc kubenswrapper[4849]: I1203 12:25:07.650048 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 12:25:07 crc kubenswrapper[4849]: I1203 12:25:07.714521 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 12:25:07 crc kubenswrapper[4849]: I1203 12:25:07.765463 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 12:25:07 crc kubenswrapper[4849]: I1203 12:25:07.920286 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 03 12:25:07 crc kubenswrapper[4849]: I1203 12:25:07.944890 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 12:25:08 crc kubenswrapper[4849]: I1203 12:25:08.087328 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 12:25:08 crc kubenswrapper[4849]: I1203 12:25:08.110489 4849 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 12:25:08 crc kubenswrapper[4849]: I1203 12:25:08.349706 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 12:25:08 crc kubenswrapper[4849]: I1203 12:25:08.384536 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 03 12:25:08 crc kubenswrapper[4849]: I1203 12:25:08.482654 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 12:25:08 crc kubenswrapper[4849]: I1203 12:25:08.486513 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 12:25:08 crc kubenswrapper[4849]: I1203 12:25:08.526930 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 12:25:08 crc kubenswrapper[4849]: I1203 12:25:08.729828 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 12:25:08 crc kubenswrapper[4849]: I1203 12:25:08.746980 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 03 12:25:08 crc kubenswrapper[4849]: I1203 12:25:08.855442 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 03 12:25:08 crc kubenswrapper[4849]: I1203 12:25:08.941674 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.136190 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.203945 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.219112 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.219325 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.255420 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.352858 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.352899 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.352934 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.352945 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.352964 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.352991 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.353018 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.353040 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.353150 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.353494 4849 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.353510 4849 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.353519 4849 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.353528 4849 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.358721 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.372599 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.381106 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.454093 4849 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.458981 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.459032 4849 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="f94444fc18a50f600cae76767abc4fb556d2afb6267a086d677d49295f0e2f46" exitCode=137 Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.459075 4849 scope.go:117] "RemoveContainer" containerID="f94444fc18a50f600cae76767abc4fb556d2afb6267a086d677d49295f0e2f46" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.459081 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.470547 4849 scope.go:117] "RemoveContainer" containerID="f94444fc18a50f600cae76767abc4fb556d2afb6267a086d677d49295f0e2f46" Dec 03 12:25:09 crc kubenswrapper[4849]: E1203 12:25:09.470838 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f94444fc18a50f600cae76767abc4fb556d2afb6267a086d677d49295f0e2f46\": container with ID starting with f94444fc18a50f600cae76767abc4fb556d2afb6267a086d677d49295f0e2f46 not found: ID does not exist" containerID="f94444fc18a50f600cae76767abc4fb556d2afb6267a086d677d49295f0e2f46" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.470860 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f94444fc18a50f600cae76767abc4fb556d2afb6267a086d677d49295f0e2f46"} err="failed to get container status \"f94444fc18a50f600cae76767abc4fb556d2afb6267a086d677d49295f0e2f46\": rpc error: code = NotFound desc = could not find container \"f94444fc18a50f600cae76767abc4fb556d2afb6267a086d677d49295f0e2f46\": container with ID starting with f94444fc18a50f600cae76767abc4fb556d2afb6267a086d677d49295f0e2f46 not found: ID does not exist" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.676742 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.704974 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.716229 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.722783 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.725566 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.766697 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.862192 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 03 12:25:09 crc kubenswrapper[4849]: I1203 12:25:09.867809 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 12:25:10 crc kubenswrapper[4849]: I1203 12:25:10.071405 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 03 12:25:10 crc kubenswrapper[4849]: I1203 12:25:10.136755 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 03 12:25:10 crc kubenswrapper[4849]: I1203 12:25:10.402520 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 12:25:10 crc kubenswrapper[4849]: I1203 12:25:10.519518 4849 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 12:25:10 crc kubenswrapper[4849]: I1203 12:25:10.525675 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 12:25:10 crc kubenswrapper[4849]: I1203 12:25:10.535910 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 12:25:10 crc kubenswrapper[4849]: I1203 12:25:10.626508 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 12:25:10 crc kubenswrapper[4849]: I1203 12:25:10.774337 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 03 12:25:10 crc kubenswrapper[4849]: I1203 12:25:10.798091 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 12:25:10 crc kubenswrapper[4849]: I1203 12:25:10.822278 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 12:25:10 crc kubenswrapper[4849]: I1203 12:25:10.897189 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 12:25:10 crc kubenswrapper[4849]: I1203 12:25:10.904061 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 12:25:10 crc kubenswrapper[4849]: I1203 12:25:10.914734 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.006102 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.037674 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.088388 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.149114 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.173285 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.458357 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.468397 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.611449 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.611535 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 
12:25:11.685296 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.737554 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.763626 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.849335 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.860781 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.882140 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 03 12:25:11 crc kubenswrapper[4849]: I1203 12:25:11.927223 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 12:25:12 crc kubenswrapper[4849]: I1203 12:25:12.170399 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 03 12:25:12 crc kubenswrapper[4849]: I1203 12:25:12.187835 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 12:25:12 crc kubenswrapper[4849]: I1203 12:25:12.215577 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 03 12:25:12 crc kubenswrapper[4849]: I1203 12:25:12.253744 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 12:25:12 crc kubenswrapper[4849]: I1203 12:25:12.450976 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 03 12:25:12 crc kubenswrapper[4849]: I1203 12:25:12.520137 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 12:25:12 crc kubenswrapper[4849]: I1203 12:25:12.723313 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 12:25:12 crc kubenswrapper[4849]: I1203 12:25:12.767205 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 12:25:12 crc kubenswrapper[4849]: I1203 12:25:12.839050 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 12:25:12 crc kubenswrapper[4849]: I1203 12:25:12.919388 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 03 12:25:12 crc kubenswrapper[4849]: I1203 12:25:12.926422 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 03 12:25:12 crc kubenswrapper[4849]: I1203 12:25:12.975402 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 12:25:13 crc kubenswrapper[4849]: I1203 
12:25:13.011407 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 03 12:25:13 crc kubenswrapper[4849]: I1203 12:25:13.011926 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 12:25:13 crc kubenswrapper[4849]: I1203 12:25:13.100519 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 12:25:13 crc kubenswrapper[4849]: I1203 12:25:13.131661 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 12:25:13 crc kubenswrapper[4849]: I1203 12:25:13.344521 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 12:25:13 crc kubenswrapper[4849]: I1203 12:25:13.370068 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 12:25:13 crc kubenswrapper[4849]: I1203 12:25:13.450693 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 12:25:13 crc kubenswrapper[4849]: I1203 12:25:13.461933 4849 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 12:25:13 crc kubenswrapper[4849]: I1203 12:25:13.604690 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 12:25:13 crc kubenswrapper[4849]: I1203 12:25:13.612413 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 12:25:13 crc kubenswrapper[4849]: I1203 12:25:13.631097 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 12:25:13 crc kubenswrapper[4849]: I1203 12:25:13.676401 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.171856 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.186269 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.209955 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.313153 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.322607 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.322714 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.456076 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.569604 4849 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.594830 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.673437 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.794467 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.805817 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.819773 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.839708 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.909901 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.909901 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.940016 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 12:25:14 crc kubenswrapper[4849]: I1203 12:25:14.976059 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.017475 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.020441 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.071789 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.072396 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.274573 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.280453 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.350102 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.357994 4849 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.401199 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.443476 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.463308 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.493027 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.614845 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.624022 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.646949 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.657064 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.901736 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 03 12:25:15 crc kubenswrapper[4849]: I1203 12:25:15.991692 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.048003 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.072246 4849 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.119998 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.157817 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.194208 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.250903 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.284819 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.321402 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 12:25:16 crc 
kubenswrapper[4849]: I1203 12:25:16.328315 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.341224 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.486177 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.598429 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.631921 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.762471 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.835191 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.955797 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 12:25:16 crc kubenswrapper[4849]: I1203 12:25:16.992671 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 12:25:17 crc kubenswrapper[4849]: I1203 12:25:17.055934 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 03 12:25:17 crc kubenswrapper[4849]: I1203 12:25:17.110278 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 12:25:17 crc kubenswrapper[4849]: I1203 12:25:17.223081 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 12:25:17 crc kubenswrapper[4849]: I1203 12:25:17.269744 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 12:25:17 crc kubenswrapper[4849]: I1203 12:25:17.386241 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 12:25:17 crc kubenswrapper[4849]: I1203 12:25:17.488708 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 03 12:25:17 crc kubenswrapper[4849]: I1203 12:25:17.537688 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 03 12:25:17 crc kubenswrapper[4849]: I1203 12:25:17.559416 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 12:25:17 crc kubenswrapper[4849]: I1203 12:25:17.623806 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 03 12:25:17 crc kubenswrapper[4849]: I1203 12:25:17.863450 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 12:25:17 crc kubenswrapper[4849]: I1203 12:25:17.864477 4849 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 12:25:17 crc kubenswrapper[4849]: I1203 12:25:17.872491 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 03 12:25:18 crc kubenswrapper[4849]: I1203 12:25:18.023430 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 12:25:18 crc kubenswrapper[4849]: I1203 12:25:18.077974 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 12:25:18 crc kubenswrapper[4849]: I1203 12:25:18.082250 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 03 12:25:18 crc kubenswrapper[4849]: I1203 12:25:18.235597 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 12:25:18 crc kubenswrapper[4849]: I1203 12:25:18.278784 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 03 12:25:18 crc kubenswrapper[4849]: I1203 12:25:18.285713 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 12:25:18 crc kubenswrapper[4849]: I1203 12:25:18.335035 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 12:25:18 crc kubenswrapper[4849]: I1203 12:25:18.659455 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 12:25:18 crc kubenswrapper[4849]: I1203 12:25:18.966384 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 12:25:18 crc kubenswrapper[4849]: I1203 12:25:18.986359 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 12:25:19 crc kubenswrapper[4849]: I1203 12:25:19.057240 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 12:25:19 crc kubenswrapper[4849]: I1203 12:25:19.106939 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 03 12:25:19 crc kubenswrapper[4849]: I1203 12:25:19.143282 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 12:25:19 crc kubenswrapper[4849]: I1203 12:25:19.245194 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 03 12:25:19 crc kubenswrapper[4849]: I1203 12:25:19.582183 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 12:25:19 crc kubenswrapper[4849]: I1203 12:25:19.610926 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 12:25:19 crc kubenswrapper[4849]: I1203 12:25:19.710422 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 03 12:25:19 crc kubenswrapper[4849]: I1203 12:25:19.739406 4849 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 03 12:25:19 crc kubenswrapper[4849]: I1203 12:25:19.952568 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 03 12:25:19 crc kubenswrapper[4849]: I1203 12:25:19.991420 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 12:25:20 crc kubenswrapper[4849]: I1203 12:25:20.016432 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 03 12:25:20 crc kubenswrapper[4849]: I1203 12:25:20.120688 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 03 12:25:20 crc kubenswrapper[4849]: I1203 12:25:20.140441 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 12:25:20 crc kubenswrapper[4849]: I1203 12:25:20.306549 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 03 12:25:20 crc kubenswrapper[4849]: I1203 12:25:20.326861 4849 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 12:25:20 crc kubenswrapper[4849]: I1203 12:25:20.418194 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 03 12:25:20 crc kubenswrapper[4849]: I1203 12:25:20.557254 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 12:25:20 crc kubenswrapper[4849]: I1203 12:25:20.820519 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 12:25:20 crc kubenswrapper[4849]: I1203 12:25:20.830333 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 03 12:25:20 crc kubenswrapper[4849]: I1203 12:25:20.967595 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 12:25:20 crc kubenswrapper[4849]: I1203 12:25:20.995018 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 03 12:25:21 crc kubenswrapper[4849]: I1203 12:25:21.005863 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 03 12:25:21 crc kubenswrapper[4849]: I1203 12:25:21.126045 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 12:25:21 crc kubenswrapper[4849]: I1203 12:25:21.220847 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 12:25:21 crc kubenswrapper[4849]: I1203 12:25:21.297090 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 12:25:21 crc kubenswrapper[4849]: I1203 12:25:21.297156 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 12:25:21 crc kubenswrapper[4849]: I1203 12:25:21.367655 4849 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 12:25:21 crc kubenswrapper[4849]: I1203 12:25:21.637865 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 12:25:21 crc kubenswrapper[4849]: I1203 12:25:21.731876 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 12:25:21 crc kubenswrapper[4849]: I1203 12:25:21.778594 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 03 12:25:21 crc kubenswrapper[4849]: I1203 12:25:21.874299 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 03 12:25:21 crc kubenswrapper[4849]: I1203 12:25:21.962092 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 03 12:25:22 crc kubenswrapper[4849]: I1203 12:25:22.176346 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 12:25:22 crc kubenswrapper[4849]: I1203 12:25:22.395717 4849 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 12:25:22 crc kubenswrapper[4849]: I1203 12:25:22.578990 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 12:25:22 crc kubenswrapper[4849]: I1203 12:25:22.845509 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 03 12:25:22 crc kubenswrapper[4849]: I1203 12:25:22.935727 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 12:25:23 crc kubenswrapper[4849]: I1203 12:25:23.310606 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 12:25:27 crc kubenswrapper[4849]: I1203 12:25:27.978544 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6rcps"] Dec 03 12:25:27 crc kubenswrapper[4849]: I1203 12:25:27.979065 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6rcps" podUID="4454830a-59f9-4ece-8e5c-554b725015ec" containerName="registry-server" containerID="cri-o://8d9b88d2c89a51333c213554ed813f4c38187ff2b1af66185abfdad71a2eea08" gracePeriod=30 Dec 03 12:25:27 crc kubenswrapper[4849]: I1203 12:25:27.983846 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cwdxv"] Dec 03 12:25:27 crc kubenswrapper[4849]: I1203 12:25:27.984045 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cwdxv" podUID="3c329326-860d-4eed-855d-e7811ee41819" containerName="registry-server" containerID="cri-o://e672e57d56d2e3c1beb038c620a11fba5a25a811b12d0dcfcf10fafc5676fbbe" gracePeriod=30 Dec 03 12:25:27 crc kubenswrapper[4849]: I1203 12:25:27.990197 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z2mdp"] Dec 03 12:25:27 crc kubenswrapper[4849]: I1203 12:25:27.990346 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" 
podUID="975a1cda-589e-4583-a601-b2a1eba69a16" containerName="marketplace-operator" containerID="cri-o://94ed221023abc408df99af0cf16fd2de740a029e09584426fb6ea443e35c0fed" gracePeriod=30 Dec 03 12:25:27 crc kubenswrapper[4849]: I1203 12:25:27.997354 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5j2b"] Dec 03 12:25:27 crc kubenswrapper[4849]: I1203 12:25:27.997538 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-h5j2b" podUID="2fbb5c1c-5b96-4563-be16-83f73dece6aa" containerName="registry-server" containerID="cri-o://a8639832a87b1a562145ac1457d429f3585114242767444d80da638bd37d8237" gracePeriod=30 Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.001374 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jvk79"] Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.002245 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jvk79" podUID="b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" containerName="registry-server" containerID="cri-o://03a0bf68cbd0b9b49782081e2d11c129c551e45f4b012044d733184eb473eb92" gracePeriod=30 Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.016799 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bk499"] Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.016998 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.017010 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.017028 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ba61539-0f8b-49bd-b7e3-623e58932769" containerName="installer" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.017033 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ba61539-0f8b-49bd-b7e3-623e58932769" containerName="installer" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.017182 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.017191 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ba61539-0f8b-49bd-b7e3-623e58932769" containerName="installer" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.017523 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bk499" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.023134 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bk499"] Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.139174 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqc4x\" (UniqueName: \"kubernetes.io/projected/2079bd80-c5b1-42e4-b5ed-a8c7ba357882-kube-api-access-mqc4x\") pod \"marketplace-operator-79b997595-bk499\" (UID: \"2079bd80-c5b1-42e4-b5ed-a8c7ba357882\") " pod="openshift-marketplace/marketplace-operator-79b997595-bk499" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.139240 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2079bd80-c5b1-42e4-b5ed-a8c7ba357882-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bk499\" (UID: \"2079bd80-c5b1-42e4-b5ed-a8c7ba357882\") " pod="openshift-marketplace/marketplace-operator-79b997595-bk499" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.139264 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2079bd80-c5b1-42e4-b5ed-a8c7ba357882-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bk499\" (UID: \"2079bd80-c5b1-42e4-b5ed-a8c7ba357882\") " pod="openshift-marketplace/marketplace-operator-79b997595-bk499" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.240407 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqc4x\" (UniqueName: \"kubernetes.io/projected/2079bd80-c5b1-42e4-b5ed-a8c7ba357882-kube-api-access-mqc4x\") pod \"marketplace-operator-79b997595-bk499\" (UID: \"2079bd80-c5b1-42e4-b5ed-a8c7ba357882\") " pod="openshift-marketplace/marketplace-operator-79b997595-bk499" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.240469 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2079bd80-c5b1-42e4-b5ed-a8c7ba357882-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bk499\" (UID: \"2079bd80-c5b1-42e4-b5ed-a8c7ba357882\") " pod="openshift-marketplace/marketplace-operator-79b997595-bk499" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.240491 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2079bd80-c5b1-42e4-b5ed-a8c7ba357882-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bk499\" (UID: \"2079bd80-c5b1-42e4-b5ed-a8c7ba357882\") " pod="openshift-marketplace/marketplace-operator-79b997595-bk499" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.241448 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2079bd80-c5b1-42e4-b5ed-a8c7ba357882-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bk499\" (UID: \"2079bd80-c5b1-42e4-b5ed-a8c7ba357882\") " pod="openshift-marketplace/marketplace-operator-79b997595-bk499" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.252549 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/2079bd80-c5b1-42e4-b5ed-a8c7ba357882-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bk499\" (UID: \"2079bd80-c5b1-42e4-b5ed-a8c7ba357882\") " pod="openshift-marketplace/marketplace-operator-79b997595-bk499" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.253558 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqc4x\" (UniqueName: \"kubernetes.io/projected/2079bd80-c5b1-42e4-b5ed-a8c7ba357882-kube-api-access-mqc4x\") pod \"marketplace-operator-79b997595-bk499\" (UID: \"2079bd80-c5b1-42e4-b5ed-a8c7ba357882\") " pod="openshift-marketplace/marketplace-operator-79b997595-bk499" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.424017 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bk499" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.426701 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.432083 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.434971 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.443006 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfcd9\" (UniqueName: \"kubernetes.io/projected/3c329326-860d-4eed-855d-e7811ee41819-kube-api-access-zfcd9\") pod \"3c329326-860d-4eed-855d-e7811ee41819\" (UID: \"3c329326-860d-4eed-855d-e7811ee41819\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.443068 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddjjr\" (UniqueName: \"kubernetes.io/projected/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-kube-api-access-ddjjr\") pod \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\" (UID: \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.443110 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-utilities\") pod \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\" (UID: \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.443139 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4454830a-59f9-4ece-8e5c-554b725015ec-catalog-content\") pod \"4454830a-59f9-4ece-8e5c-554b725015ec\" (UID: \"4454830a-59f9-4ece-8e5c-554b725015ec\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.443168 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52lvf\" (UniqueName: \"kubernetes.io/projected/4454830a-59f9-4ece-8e5c-554b725015ec-kube-api-access-52lvf\") pod \"4454830a-59f9-4ece-8e5c-554b725015ec\" (UID: \"4454830a-59f9-4ece-8e5c-554b725015ec\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.443204 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4454830a-59f9-4ece-8e5c-554b725015ec-utilities\") pod 
\"4454830a-59f9-4ece-8e5c-554b725015ec\" (UID: \"4454830a-59f9-4ece-8e5c-554b725015ec\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.443218 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c329326-860d-4eed-855d-e7811ee41819-utilities\") pod \"3c329326-860d-4eed-855d-e7811ee41819\" (UID: \"3c329326-860d-4eed-855d-e7811ee41819\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.443232 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c329326-860d-4eed-855d-e7811ee41819-catalog-content\") pod \"3c329326-860d-4eed-855d-e7811ee41819\" (UID: \"3c329326-860d-4eed-855d-e7811ee41819\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.443259 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-catalog-content\") pod \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\" (UID: \"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.444178 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4454830a-59f9-4ece-8e5c-554b725015ec-utilities" (OuterVolumeSpecName: "utilities") pod "4454830a-59f9-4ece-8e5c-554b725015ec" (UID: "4454830a-59f9-4ece-8e5c-554b725015ec"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.444262 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c329326-860d-4eed-855d-e7811ee41819-utilities" (OuterVolumeSpecName: "utilities") pod "3c329326-860d-4eed-855d-e7811ee41819" (UID: "3c329326-860d-4eed-855d-e7811ee41819"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.444891 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-utilities" (OuterVolumeSpecName: "utilities") pod "b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" (UID: "b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.447742 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-kube-api-access-ddjjr" (OuterVolumeSpecName: "kube-api-access-ddjjr") pod "b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" (UID: "b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60"). InnerVolumeSpecName "kube-api-access-ddjjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.448249 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4454830a-59f9-4ece-8e5c-554b725015ec-kube-api-access-52lvf" (OuterVolumeSpecName: "kube-api-access-52lvf") pod "4454830a-59f9-4ece-8e5c-554b725015ec" (UID: "4454830a-59f9-4ece-8e5c-554b725015ec"). InnerVolumeSpecName "kube-api-access-52lvf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.448322 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c329326-860d-4eed-855d-e7811ee41819-kube-api-access-zfcd9" (OuterVolumeSpecName: "kube-api-access-zfcd9") pod "3c329326-860d-4eed-855d-e7811ee41819" (UID: "3c329326-860d-4eed-855d-e7811ee41819"). InnerVolumeSpecName "kube-api-access-zfcd9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.473313 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.476635 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.508713 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4454830a-59f9-4ece-8e5c-554b725015ec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4454830a-59f9-4ece-8e5c-554b725015ec" (UID: "4454830a-59f9-4ece-8e5c-554b725015ec"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.517220 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c329326-860d-4eed-855d-e7811ee41819-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3c329326-860d-4eed-855d-e7811ee41819" (UID: "3c329326-860d-4eed-855d-e7811ee41819"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.530771 4849 generic.go:334] "Generic (PLEG): container finished" podID="b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" containerID="03a0bf68cbd0b9b49782081e2d11c129c551e45f4b012044d733184eb473eb92" exitCode=0 Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.530822 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jvk79" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.530827 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jvk79" event={"ID":"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60","Type":"ContainerDied","Data":"03a0bf68cbd0b9b49782081e2d11c129c551e45f4b012044d733184eb473eb92"} Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.530930 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jvk79" event={"ID":"b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60","Type":"ContainerDied","Data":"fce386904306390b56ec223e5eab5635331b7d7f4b17eee2c0ec65cd7b573fdb"} Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.530947 4849 scope.go:117] "RemoveContainer" containerID="03a0bf68cbd0b9b49782081e2d11c129c551e45f4b012044d733184eb473eb92" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.532424 4849 generic.go:334] "Generic (PLEG): container finished" podID="3c329326-860d-4eed-855d-e7811ee41819" containerID="e672e57d56d2e3c1beb038c620a11fba5a25a811b12d0dcfcf10fafc5676fbbe" exitCode=0 Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.532497 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwdxv" event={"ID":"3c329326-860d-4eed-855d-e7811ee41819","Type":"ContainerDied","Data":"e672e57d56d2e3c1beb038c620a11fba5a25a811b12d0dcfcf10fafc5676fbbe"} Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.532528 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cwdxv" event={"ID":"3c329326-860d-4eed-855d-e7811ee41819","Type":"ContainerDied","Data":"3ebdc20a1c69ddf42e51f27476064c442a187ff3e2bf207ec25f844ca5d239aa"} Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.532611 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cwdxv" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.533555 4849 generic.go:334] "Generic (PLEG): container finished" podID="975a1cda-589e-4583-a601-b2a1eba69a16" containerID="94ed221023abc408df99af0cf16fd2de740a029e09584426fb6ea443e35c0fed" exitCode=0 Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.533584 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.533599 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" event={"ID":"975a1cda-589e-4583-a601-b2a1eba69a16","Type":"ContainerDied","Data":"94ed221023abc408df99af0cf16fd2de740a029e09584426fb6ea443e35c0fed"} Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.533616 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-z2mdp" event={"ID":"975a1cda-589e-4583-a601-b2a1eba69a16","Type":"ContainerDied","Data":"263ed43668460be7b335116729a7d7abd74b39547dcac7a9c43a3594e08c87a3"} Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.536308 4849 generic.go:334] "Generic (PLEG): container finished" podID="2fbb5c1c-5b96-4563-be16-83f73dece6aa" containerID="a8639832a87b1a562145ac1457d429f3585114242767444d80da638bd37d8237" exitCode=0 Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.536345 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5j2b" event={"ID":"2fbb5c1c-5b96-4563-be16-83f73dece6aa","Type":"ContainerDied","Data":"a8639832a87b1a562145ac1457d429f3585114242767444d80da638bd37d8237"} Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.536362 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5j2b" event={"ID":"2fbb5c1c-5b96-4563-be16-83f73dece6aa","Type":"ContainerDied","Data":"ecec2f1d72adba43d4ce6f4bfb623205eb5983d9acfe5ba72dfe75c539e91fb6"} Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.536407 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5j2b" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.538601 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" (UID: "b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.541171 4849 generic.go:334] "Generic (PLEG): container finished" podID="4454830a-59f9-4ece-8e5c-554b725015ec" containerID="8d9b88d2c89a51333c213554ed813f4c38187ff2b1af66185abfdad71a2eea08" exitCode=0 Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.541199 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6rcps" event={"ID":"4454830a-59f9-4ece-8e5c-554b725015ec","Type":"ContainerDied","Data":"8d9b88d2c89a51333c213554ed813f4c38187ff2b1af66185abfdad71a2eea08"} Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.541213 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6rcps" event={"ID":"4454830a-59f9-4ece-8e5c-554b725015ec","Type":"ContainerDied","Data":"c6340a35976ec2e2d4e2eb4629762a66ce0c6f6d7f76179504c505b65b84f4ad"} Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.541251 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6rcps" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.544919 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52lvf\" (UniqueName: \"kubernetes.io/projected/4454830a-59f9-4ece-8e5c-554b725015ec-kube-api-access-52lvf\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.544965 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4454830a-59f9-4ece-8e5c-554b725015ec-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.544977 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c329326-860d-4eed-855d-e7811ee41819-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.544985 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c329326-860d-4eed-855d-e7811ee41819-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.544995 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.545002 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfcd9\" (UniqueName: \"kubernetes.io/projected/3c329326-860d-4eed-855d-e7811ee41819-kube-api-access-zfcd9\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.545010 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddjjr\" (UniqueName: \"kubernetes.io/projected/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-kube-api-access-ddjjr\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.545017 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.545024 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4454830a-59f9-4ece-8e5c-554b725015ec-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.546937 4849 scope.go:117] "RemoveContainer" containerID="768f55b2710300b7ef3ae935f7645cf176c8cf61026e21a117811cde90cdc2b6" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.556361 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cwdxv"] Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.560235 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cwdxv"] Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.567593 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6rcps"] Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.569981 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6rcps"] Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.575771 4849 scope.go:117] "RemoveContainer" 
containerID="d7f92c642dd395195f01c25d4ebec7c1fafaa6c87584c28eaf3563b52314e91a" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.584597 4849 scope.go:117] "RemoveContainer" containerID="03a0bf68cbd0b9b49782081e2d11c129c551e45f4b012044d733184eb473eb92" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.584870 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03a0bf68cbd0b9b49782081e2d11c129c551e45f4b012044d733184eb473eb92\": container with ID starting with 03a0bf68cbd0b9b49782081e2d11c129c551e45f4b012044d733184eb473eb92 not found: ID does not exist" containerID="03a0bf68cbd0b9b49782081e2d11c129c551e45f4b012044d733184eb473eb92" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.584903 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03a0bf68cbd0b9b49782081e2d11c129c551e45f4b012044d733184eb473eb92"} err="failed to get container status \"03a0bf68cbd0b9b49782081e2d11c129c551e45f4b012044d733184eb473eb92\": rpc error: code = NotFound desc = could not find container \"03a0bf68cbd0b9b49782081e2d11c129c551e45f4b012044d733184eb473eb92\": container with ID starting with 03a0bf68cbd0b9b49782081e2d11c129c551e45f4b012044d733184eb473eb92 not found: ID does not exist" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.584924 4849 scope.go:117] "RemoveContainer" containerID="768f55b2710300b7ef3ae935f7645cf176c8cf61026e21a117811cde90cdc2b6" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.585186 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"768f55b2710300b7ef3ae935f7645cf176c8cf61026e21a117811cde90cdc2b6\": container with ID starting with 768f55b2710300b7ef3ae935f7645cf176c8cf61026e21a117811cde90cdc2b6 not found: ID does not exist" containerID="768f55b2710300b7ef3ae935f7645cf176c8cf61026e21a117811cde90cdc2b6" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.585213 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"768f55b2710300b7ef3ae935f7645cf176c8cf61026e21a117811cde90cdc2b6"} err="failed to get container status \"768f55b2710300b7ef3ae935f7645cf176c8cf61026e21a117811cde90cdc2b6\": rpc error: code = NotFound desc = could not find container \"768f55b2710300b7ef3ae935f7645cf176c8cf61026e21a117811cde90cdc2b6\": container with ID starting with 768f55b2710300b7ef3ae935f7645cf176c8cf61026e21a117811cde90cdc2b6 not found: ID does not exist" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.585231 4849 scope.go:117] "RemoveContainer" containerID="d7f92c642dd395195f01c25d4ebec7c1fafaa6c87584c28eaf3563b52314e91a" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.585444 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7f92c642dd395195f01c25d4ebec7c1fafaa6c87584c28eaf3563b52314e91a\": container with ID starting with d7f92c642dd395195f01c25d4ebec7c1fafaa6c87584c28eaf3563b52314e91a not found: ID does not exist" containerID="d7f92c642dd395195f01c25d4ebec7c1fafaa6c87584c28eaf3563b52314e91a" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.585463 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7f92c642dd395195f01c25d4ebec7c1fafaa6c87584c28eaf3563b52314e91a"} err="failed to get container status \"d7f92c642dd395195f01c25d4ebec7c1fafaa6c87584c28eaf3563b52314e91a\": rpc error: code = 
NotFound desc = could not find container \"d7f92c642dd395195f01c25d4ebec7c1fafaa6c87584c28eaf3563b52314e91a\": container with ID starting with d7f92c642dd395195f01c25d4ebec7c1fafaa6c87584c28eaf3563b52314e91a not found: ID does not exist" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.585476 4849 scope.go:117] "RemoveContainer" containerID="e672e57d56d2e3c1beb038c620a11fba5a25a811b12d0dcfcf10fafc5676fbbe" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.595345 4849 scope.go:117] "RemoveContainer" containerID="fa87c12f08583c18561877a36b903bdc4904d572109d060cace569a375599b48" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.605797 4849 scope.go:117] "RemoveContainer" containerID="91709dcec8acc50195b0ab095ada00e680810fb9797cb1b5cb82c183472a17fe" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.615552 4849 scope.go:117] "RemoveContainer" containerID="e672e57d56d2e3c1beb038c620a11fba5a25a811b12d0dcfcf10fafc5676fbbe" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.615841 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e672e57d56d2e3c1beb038c620a11fba5a25a811b12d0dcfcf10fafc5676fbbe\": container with ID starting with e672e57d56d2e3c1beb038c620a11fba5a25a811b12d0dcfcf10fafc5676fbbe not found: ID does not exist" containerID="e672e57d56d2e3c1beb038c620a11fba5a25a811b12d0dcfcf10fafc5676fbbe" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.615864 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e672e57d56d2e3c1beb038c620a11fba5a25a811b12d0dcfcf10fafc5676fbbe"} err="failed to get container status \"e672e57d56d2e3c1beb038c620a11fba5a25a811b12d0dcfcf10fafc5676fbbe\": rpc error: code = NotFound desc = could not find container \"e672e57d56d2e3c1beb038c620a11fba5a25a811b12d0dcfcf10fafc5676fbbe\": container with ID starting with e672e57d56d2e3c1beb038c620a11fba5a25a811b12d0dcfcf10fafc5676fbbe not found: ID does not exist" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.615879 4849 scope.go:117] "RemoveContainer" containerID="fa87c12f08583c18561877a36b903bdc4904d572109d060cace569a375599b48" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.616119 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa87c12f08583c18561877a36b903bdc4904d572109d060cace569a375599b48\": container with ID starting with fa87c12f08583c18561877a36b903bdc4904d572109d060cace569a375599b48 not found: ID does not exist" containerID="fa87c12f08583c18561877a36b903bdc4904d572109d060cace569a375599b48" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.616141 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa87c12f08583c18561877a36b903bdc4904d572109d060cace569a375599b48"} err="failed to get container status \"fa87c12f08583c18561877a36b903bdc4904d572109d060cace569a375599b48\": rpc error: code = NotFound desc = could not find container \"fa87c12f08583c18561877a36b903bdc4904d572109d060cace569a375599b48\": container with ID starting with fa87c12f08583c18561877a36b903bdc4904d572109d060cace569a375599b48 not found: ID does not exist" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.616153 4849 scope.go:117] "RemoveContainer" containerID="91709dcec8acc50195b0ab095ada00e680810fb9797cb1b5cb82c183472a17fe" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.616382 4849 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"91709dcec8acc50195b0ab095ada00e680810fb9797cb1b5cb82c183472a17fe\": container with ID starting with 91709dcec8acc50195b0ab095ada00e680810fb9797cb1b5cb82c183472a17fe not found: ID does not exist" containerID="91709dcec8acc50195b0ab095ada00e680810fb9797cb1b5cb82c183472a17fe" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.616401 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91709dcec8acc50195b0ab095ada00e680810fb9797cb1b5cb82c183472a17fe"} err="failed to get container status \"91709dcec8acc50195b0ab095ada00e680810fb9797cb1b5cb82c183472a17fe\": rpc error: code = NotFound desc = could not find container \"91709dcec8acc50195b0ab095ada00e680810fb9797cb1b5cb82c183472a17fe\": container with ID starting with 91709dcec8acc50195b0ab095ada00e680810fb9797cb1b5cb82c183472a17fe not found: ID does not exist" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.616412 4849 scope.go:117] "RemoveContainer" containerID="94ed221023abc408df99af0cf16fd2de740a029e09584426fb6ea443e35c0fed" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.624709 4849 scope.go:117] "RemoveContainer" containerID="94ed221023abc408df99af0cf16fd2de740a029e09584426fb6ea443e35c0fed" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.624915 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94ed221023abc408df99af0cf16fd2de740a029e09584426fb6ea443e35c0fed\": container with ID starting with 94ed221023abc408df99af0cf16fd2de740a029e09584426fb6ea443e35c0fed not found: ID does not exist" containerID="94ed221023abc408df99af0cf16fd2de740a029e09584426fb6ea443e35c0fed" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.624938 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94ed221023abc408df99af0cf16fd2de740a029e09584426fb6ea443e35c0fed"} err="failed to get container status \"94ed221023abc408df99af0cf16fd2de740a029e09584426fb6ea443e35c0fed\": rpc error: code = NotFound desc = could not find container \"94ed221023abc408df99af0cf16fd2de740a029e09584426fb6ea443e35c0fed\": container with ID starting with 94ed221023abc408df99af0cf16fd2de740a029e09584426fb6ea443e35c0fed not found: ID does not exist" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.624952 4849 scope.go:117] "RemoveContainer" containerID="a8639832a87b1a562145ac1457d429f3585114242767444d80da638bd37d8237" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.633325 4849 scope.go:117] "RemoveContainer" containerID="f638a6601d80ef43b6a0c27095b90c672da0d2907a869069734fe9cde01ac969" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.642748 4849 scope.go:117] "RemoveContainer" containerID="79de5692f0aa48361ca4e218bf20713bd6a6d932babb24c5f688e0780ae9e53b" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.645966 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-trusted-ca\") pod \"975a1cda-589e-4583-a601-b2a1eba69a16\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.646163 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-operator-metrics\") pod 
\"975a1cda-589e-4583-a601-b2a1eba69a16\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.646281 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7k5m\" (UniqueName: \"kubernetes.io/projected/975a1cda-589e-4583-a601-b2a1eba69a16-kube-api-access-t7k5m\") pod \"975a1cda-589e-4583-a601-b2a1eba69a16\" (UID: \"975a1cda-589e-4583-a601-b2a1eba69a16\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.646461 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "975a1cda-589e-4583-a601-b2a1eba69a16" (UID: "975a1cda-589e-4583-a601-b2a1eba69a16"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.646839 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fbb5c1c-5b96-4563-be16-83f73dece6aa-utilities\") pod \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\" (UID: \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.646940 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fbb5c1c-5b96-4563-be16-83f73dece6aa-catalog-content\") pod \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\" (UID: \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.647110 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spcbn\" (UniqueName: \"kubernetes.io/projected/2fbb5c1c-5b96-4563-be16-83f73dece6aa-kube-api-access-spcbn\") pod \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\" (UID: \"2fbb5c1c-5b96-4563-be16-83f73dece6aa\") " Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.647431 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fbb5c1c-5b96-4563-be16-83f73dece6aa-utilities" (OuterVolumeSpecName: "utilities") pod "2fbb5c1c-5b96-4563-be16-83f73dece6aa" (UID: "2fbb5c1c-5b96-4563-be16-83f73dece6aa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.647593 4849 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.647675 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fbb5c1c-5b96-4563-be16-83f73dece6aa-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.648625 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/975a1cda-589e-4583-a601-b2a1eba69a16-kube-api-access-t7k5m" (OuterVolumeSpecName: "kube-api-access-t7k5m") pod "975a1cda-589e-4583-a601-b2a1eba69a16" (UID: "975a1cda-589e-4583-a601-b2a1eba69a16"). InnerVolumeSpecName "kube-api-access-t7k5m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.648684 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "975a1cda-589e-4583-a601-b2a1eba69a16" (UID: "975a1cda-589e-4583-a601-b2a1eba69a16"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.648985 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fbb5c1c-5b96-4563-be16-83f73dece6aa-kube-api-access-spcbn" (OuterVolumeSpecName: "kube-api-access-spcbn") pod "2fbb5c1c-5b96-4563-be16-83f73dece6aa" (UID: "2fbb5c1c-5b96-4563-be16-83f73dece6aa"). InnerVolumeSpecName "kube-api-access-spcbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.651359 4849 scope.go:117] "RemoveContainer" containerID="a8639832a87b1a562145ac1457d429f3585114242767444d80da638bd37d8237" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.651591 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8639832a87b1a562145ac1457d429f3585114242767444d80da638bd37d8237\": container with ID starting with a8639832a87b1a562145ac1457d429f3585114242767444d80da638bd37d8237 not found: ID does not exist" containerID="a8639832a87b1a562145ac1457d429f3585114242767444d80da638bd37d8237" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.651615 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8639832a87b1a562145ac1457d429f3585114242767444d80da638bd37d8237"} err="failed to get container status \"a8639832a87b1a562145ac1457d429f3585114242767444d80da638bd37d8237\": rpc error: code = NotFound desc = could not find container \"a8639832a87b1a562145ac1457d429f3585114242767444d80da638bd37d8237\": container with ID starting with a8639832a87b1a562145ac1457d429f3585114242767444d80da638bd37d8237 not found: ID does not exist" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.651631 4849 scope.go:117] "RemoveContainer" containerID="f638a6601d80ef43b6a0c27095b90c672da0d2907a869069734fe9cde01ac969" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.651860 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f638a6601d80ef43b6a0c27095b90c672da0d2907a869069734fe9cde01ac969\": container with ID starting with f638a6601d80ef43b6a0c27095b90c672da0d2907a869069734fe9cde01ac969 not found: ID does not exist" containerID="f638a6601d80ef43b6a0c27095b90c672da0d2907a869069734fe9cde01ac969" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.651964 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f638a6601d80ef43b6a0c27095b90c672da0d2907a869069734fe9cde01ac969"} err="failed to get container status \"f638a6601d80ef43b6a0c27095b90c672da0d2907a869069734fe9cde01ac969\": rpc error: code = NotFound desc = could not find container \"f638a6601d80ef43b6a0c27095b90c672da0d2907a869069734fe9cde01ac969\": container with ID starting with f638a6601d80ef43b6a0c27095b90c672da0d2907a869069734fe9cde01ac969 not found: ID does not exist" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.652040 4849 scope.go:117] 
"RemoveContainer" containerID="79de5692f0aa48361ca4e218bf20713bd6a6d932babb24c5f688e0780ae9e53b" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.652338 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79de5692f0aa48361ca4e218bf20713bd6a6d932babb24c5f688e0780ae9e53b\": container with ID starting with 79de5692f0aa48361ca4e218bf20713bd6a6d932babb24c5f688e0780ae9e53b not found: ID does not exist" containerID="79de5692f0aa48361ca4e218bf20713bd6a6d932babb24c5f688e0780ae9e53b" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.652361 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79de5692f0aa48361ca4e218bf20713bd6a6d932babb24c5f688e0780ae9e53b"} err="failed to get container status \"79de5692f0aa48361ca4e218bf20713bd6a6d932babb24c5f688e0780ae9e53b\": rpc error: code = NotFound desc = could not find container \"79de5692f0aa48361ca4e218bf20713bd6a6d932babb24c5f688e0780ae9e53b\": container with ID starting with 79de5692f0aa48361ca4e218bf20713bd6a6d932babb24c5f688e0780ae9e53b not found: ID does not exist" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.652378 4849 scope.go:117] "RemoveContainer" containerID="8d9b88d2c89a51333c213554ed813f4c38187ff2b1af66185abfdad71a2eea08" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.660553 4849 scope.go:117] "RemoveContainer" containerID="1bff27fd71845123c156f6ed14374384cfb0ab216d07aea0c3a90a06b7d44124" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.661346 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fbb5c1c-5b96-4563-be16-83f73dece6aa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2fbb5c1c-5b96-4563-be16-83f73dece6aa" (UID: "2fbb5c1c-5b96-4563-be16-83f73dece6aa"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.669667 4849 scope.go:117] "RemoveContainer" containerID="df55a181b56e824dd4de5374857214f29403c07266d972a1d953fa39340f2e7e" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.678064 4849 scope.go:117] "RemoveContainer" containerID="8d9b88d2c89a51333c213554ed813f4c38187ff2b1af66185abfdad71a2eea08" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.678296 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d9b88d2c89a51333c213554ed813f4c38187ff2b1af66185abfdad71a2eea08\": container with ID starting with 8d9b88d2c89a51333c213554ed813f4c38187ff2b1af66185abfdad71a2eea08 not found: ID does not exist" containerID="8d9b88d2c89a51333c213554ed813f4c38187ff2b1af66185abfdad71a2eea08" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.678322 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d9b88d2c89a51333c213554ed813f4c38187ff2b1af66185abfdad71a2eea08"} err="failed to get container status \"8d9b88d2c89a51333c213554ed813f4c38187ff2b1af66185abfdad71a2eea08\": rpc error: code = NotFound desc = could not find container \"8d9b88d2c89a51333c213554ed813f4c38187ff2b1af66185abfdad71a2eea08\": container with ID starting with 8d9b88d2c89a51333c213554ed813f4c38187ff2b1af66185abfdad71a2eea08 not found: ID does not exist" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.678338 4849 scope.go:117] "RemoveContainer" containerID="1bff27fd71845123c156f6ed14374384cfb0ab216d07aea0c3a90a06b7d44124" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.678534 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bff27fd71845123c156f6ed14374384cfb0ab216d07aea0c3a90a06b7d44124\": container with ID starting with 1bff27fd71845123c156f6ed14374384cfb0ab216d07aea0c3a90a06b7d44124 not found: ID does not exist" containerID="1bff27fd71845123c156f6ed14374384cfb0ab216d07aea0c3a90a06b7d44124" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.678560 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bff27fd71845123c156f6ed14374384cfb0ab216d07aea0c3a90a06b7d44124"} err="failed to get container status \"1bff27fd71845123c156f6ed14374384cfb0ab216d07aea0c3a90a06b7d44124\": rpc error: code = NotFound desc = could not find container \"1bff27fd71845123c156f6ed14374384cfb0ab216d07aea0c3a90a06b7d44124\": container with ID starting with 1bff27fd71845123c156f6ed14374384cfb0ab216d07aea0c3a90a06b7d44124 not found: ID does not exist" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.678576 4849 scope.go:117] "RemoveContainer" containerID="df55a181b56e824dd4de5374857214f29403c07266d972a1d953fa39340f2e7e" Dec 03 12:25:28 crc kubenswrapper[4849]: E1203 12:25:28.678885 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df55a181b56e824dd4de5374857214f29403c07266d972a1d953fa39340f2e7e\": container with ID starting with df55a181b56e824dd4de5374857214f29403c07266d972a1d953fa39340f2e7e not found: ID does not exist" containerID="df55a181b56e824dd4de5374857214f29403c07266d972a1d953fa39340f2e7e" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.678909 4849 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"df55a181b56e824dd4de5374857214f29403c07266d972a1d953fa39340f2e7e"} err="failed to get container status \"df55a181b56e824dd4de5374857214f29403c07266d972a1d953fa39340f2e7e\": rpc error: code = NotFound desc = could not find container \"df55a181b56e824dd4de5374857214f29403c07266d972a1d953fa39340f2e7e\": container with ID starting with df55a181b56e824dd4de5374857214f29403c07266d972a1d953fa39340f2e7e not found: ID does not exist" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.748856 4849 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/975a1cda-589e-4583-a601-b2a1eba69a16-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.749106 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7k5m\" (UniqueName: \"kubernetes.io/projected/975a1cda-589e-4583-a601-b2a1eba69a16-kube-api-access-t7k5m\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.749181 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fbb5c1c-5b96-4563-be16-83f73dece6aa-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.749235 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spcbn\" (UniqueName: \"kubernetes.io/projected/2fbb5c1c-5b96-4563-be16-83f73dece6aa-kube-api-access-spcbn\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.788179 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bk499"] Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.861662 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z2mdp"] Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.864247 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-z2mdp"] Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.867871 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jvk79"] Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.870339 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jvk79"] Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.878044 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5j2b"] Dec 03 12:25:28 crc kubenswrapper[4849]: I1203 12:25:28.879226 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5j2b"] Dec 03 12:25:29 crc kubenswrapper[4849]: I1203 12:25:29.547806 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bk499" event={"ID":"2079bd80-c5b1-42e4-b5ed-a8c7ba357882","Type":"ContainerStarted","Data":"85e16279eb792cc649f1767868ca3b6370497eda5027d4115d6539d1161449f9"} Dec 03 12:25:29 crc kubenswrapper[4849]: I1203 12:25:29.548014 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bk499" event={"ID":"2079bd80-c5b1-42e4-b5ed-a8c7ba357882","Type":"ContainerStarted","Data":"2b14df4dbca08ced5caad585c79bb4098e4ecf0630a7e1e48f9e1ca33ad8612b"} Dec 03 12:25:29 crc kubenswrapper[4849]: I1203 
12:25:29.548030 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bk499" Dec 03 12:25:29 crc kubenswrapper[4849]: I1203 12:25:29.551859 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-bk499" Dec 03 12:25:29 crc kubenswrapper[4849]: I1203 12:25:29.559436 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-bk499" podStartSLOduration=1.559425387 podStartE2EDuration="1.559425387s" podCreationTimestamp="2025-12-03 12:25:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:25:29.559195937 +0000 UTC m=+276.021043720" watchObservedRunningTime="2025-12-03 12:25:29.559425387 +0000 UTC m=+276.021273171" Dec 03 12:25:29 crc kubenswrapper[4849]: I1203 12:25:29.861009 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fbb5c1c-5b96-4563-be16-83f73dece6aa" path="/var/lib/kubelet/pods/2fbb5c1c-5b96-4563-be16-83f73dece6aa/volumes" Dec 03 12:25:29 crc kubenswrapper[4849]: I1203 12:25:29.861552 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c329326-860d-4eed-855d-e7811ee41819" path="/var/lib/kubelet/pods/3c329326-860d-4eed-855d-e7811ee41819/volumes" Dec 03 12:25:29 crc kubenswrapper[4849]: I1203 12:25:29.862082 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4454830a-59f9-4ece-8e5c-554b725015ec" path="/var/lib/kubelet/pods/4454830a-59f9-4ece-8e5c-554b725015ec/volumes" Dec 03 12:25:29 crc kubenswrapper[4849]: I1203 12:25:29.862617 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="975a1cda-589e-4583-a601-b2a1eba69a16" path="/var/lib/kubelet/pods/975a1cda-589e-4583-a601-b2a1eba69a16/volumes" Dec 03 12:25:29 crc kubenswrapper[4849]: I1203 12:25:29.863015 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" path="/var/lib/kubelet/pods/b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60/volumes" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.373361 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xk97z"] Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.374070 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" podUID="27bd0071-f571-4860-b882-360fbb4fd181" containerName="controller-manager" containerID="cri-o://378ad15e10f1ce7f31b60022dcca7b94a5e22ae464705ba8570623767c258c52" gracePeriod=30 Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.483459 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56"] Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.483634 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" podUID="cfa5e2db-9a31-40f0-90a5-a4f19c4a1413" containerName="route-controller-manager" containerID="cri-o://6fdd714fdf50b3af6c2db211cd10fd1f1ab43bca7ba08b847ff368f1691fe0c0" gracePeriod=30 Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.631192 4849 generic.go:334] "Generic (PLEG): container finished" podID="27bd0071-f571-4860-b882-360fbb4fd181" 
containerID="378ad15e10f1ce7f31b60022dcca7b94a5e22ae464705ba8570623767c258c52" exitCode=0 Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.631272 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" event={"ID":"27bd0071-f571-4860-b882-360fbb4fd181","Type":"ContainerDied","Data":"378ad15e10f1ce7f31b60022dcca7b94a5e22ae464705ba8570623767c258c52"} Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.632761 4849 generic.go:334] "Generic (PLEG): container finished" podID="cfa5e2db-9a31-40f0-90a5-a4f19c4a1413" containerID="6fdd714fdf50b3af6c2db211cd10fd1f1ab43bca7ba08b847ff368f1691fe0c0" exitCode=0 Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.632800 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" event={"ID":"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413","Type":"ContainerDied","Data":"6fdd714fdf50b3af6c2db211cd10fd1f1ab43bca7ba08b847ff368f1691fe0c0"} Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.660009 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.805083 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.848213 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-client-ca\") pod \"27bd0071-f571-4860-b882-360fbb4fd181\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.848304 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27bd0071-f571-4860-b882-360fbb4fd181-serving-cert\") pod \"27bd0071-f571-4860-b882-360fbb4fd181\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.848334 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-config\") pod \"27bd0071-f571-4860-b882-360fbb4fd181\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.848361 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tqt6\" (UniqueName: \"kubernetes.io/projected/27bd0071-f571-4860-b882-360fbb4fd181-kube-api-access-5tqt6\") pod \"27bd0071-f571-4860-b882-360fbb4fd181\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.848396 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-proxy-ca-bundles\") pod \"27bd0071-f571-4860-b882-360fbb4fd181\" (UID: \"27bd0071-f571-4860-b882-360fbb4fd181\") " Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.849157 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-config" (OuterVolumeSpecName: "config") pod "27bd0071-f571-4860-b882-360fbb4fd181" (UID: 
"27bd0071-f571-4860-b882-360fbb4fd181"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.849636 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-client-ca" (OuterVolumeSpecName: "client-ca") pod "27bd0071-f571-4860-b882-360fbb4fd181" (UID: "27bd0071-f571-4860-b882-360fbb4fd181"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.849776 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "27bd0071-f571-4860-b882-360fbb4fd181" (UID: "27bd0071-f571-4860-b882-360fbb4fd181"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.853026 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27bd0071-f571-4860-b882-360fbb4fd181-kube-api-access-5tqt6" (OuterVolumeSpecName: "kube-api-access-5tqt6") pod "27bd0071-f571-4860-b882-360fbb4fd181" (UID: "27bd0071-f571-4860-b882-360fbb4fd181"). InnerVolumeSpecName "kube-api-access-5tqt6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.853034 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27bd0071-f571-4860-b882-360fbb4fd181-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "27bd0071-f571-4860-b882-360fbb4fd181" (UID: "27bd0071-f571-4860-b882-360fbb4fd181"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.949261 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-serving-cert\") pod \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.949310 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnjwk\" (UniqueName: \"kubernetes.io/projected/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-kube-api-access-jnjwk\") pod \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.949329 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-client-ca\") pod \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.949368 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-config\") pod \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\" (UID: \"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413\") " Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.949567 4849 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.949584 4849 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.949592 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27bd0071-f571-4860-b882-360fbb4fd181-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.949600 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27bd0071-f571-4860-b882-360fbb4fd181-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.949608 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tqt6\" (UniqueName: \"kubernetes.io/projected/27bd0071-f571-4860-b882-360fbb4fd181-kube-api-access-5tqt6\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.950110 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-config" (OuterVolumeSpecName: "config") pod "cfa5e2db-9a31-40f0-90a5-a4f19c4a1413" (UID: "cfa5e2db-9a31-40f0-90a5-a4f19c4a1413"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.950338 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-client-ca" (OuterVolumeSpecName: "client-ca") pod "cfa5e2db-9a31-40f0-90a5-a4f19c4a1413" (UID: "cfa5e2db-9a31-40f0-90a5-a4f19c4a1413"). 
InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.951956 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-kube-api-access-jnjwk" (OuterVolumeSpecName: "kube-api-access-jnjwk") pod "cfa5e2db-9a31-40f0-90a5-a4f19c4a1413" (UID: "cfa5e2db-9a31-40f0-90a5-a4f19c4a1413"). InnerVolumeSpecName "kube-api-access-jnjwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:25:49 crc kubenswrapper[4849]: I1203 12:25:49.952207 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "cfa5e2db-9a31-40f0-90a5-a4f19c4a1413" (UID: "cfa5e2db-9a31-40f0-90a5-a4f19c4a1413"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.050668 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.050699 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.050712 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnjwk\" (UniqueName: \"kubernetes.io/projected/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-kube-api-access-jnjwk\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.050723 4849 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.637303 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" event={"ID":"cfa5e2db-9a31-40f0-90a5-a4f19c4a1413","Type":"ContainerDied","Data":"aae0445dfe23ae89429246fbaf897d0ee72ac837ffa2d66fd98c0348e198e38b"} Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.637349 4849 scope.go:117] "RemoveContainer" containerID="6fdd714fdf50b3af6c2db211cd10fd1f1ab43bca7ba08b847ff368f1691fe0c0" Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.637439 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56" Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.642292 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" event={"ID":"27bd0071-f571-4860-b882-360fbb4fd181","Type":"ContainerDied","Data":"da40ae95129c9495501901d286daea7573995e0cf73563d0802f1ff74136bcb6"} Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.642348 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xk97z" Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.652610 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xk97z"] Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.653382 4849 scope.go:117] "RemoveContainer" containerID="378ad15e10f1ce7f31b60022dcca7b94a5e22ae464705ba8570623767c258c52" Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.654441 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xk97z"] Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.662958 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56"] Dec 03 12:25:50 crc kubenswrapper[4849]: I1203 12:25:50.663007 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-v6w56"] Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.182881 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-c6f7bc5d-n29jf"] Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183079 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27bd0071-f571-4860-b882-360fbb4fd181" containerName="controller-manager" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183091 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="27bd0071-f571-4860-b882-360fbb4fd181" containerName="controller-manager" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183101 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fbb5c1c-5b96-4563-be16-83f73dece6aa" containerName="extract-utilities" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183106 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fbb5c1c-5b96-4563-be16-83f73dece6aa" containerName="extract-utilities" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183112 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4454830a-59f9-4ece-8e5c-554b725015ec" containerName="registry-server" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183117 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="4454830a-59f9-4ece-8e5c-554b725015ec" containerName="registry-server" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183124 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfa5e2db-9a31-40f0-90a5-a4f19c4a1413" containerName="route-controller-manager" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183129 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfa5e2db-9a31-40f0-90a5-a4f19c4a1413" containerName="route-controller-manager" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183135 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" containerName="extract-utilities" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183140 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" containerName="extract-utilities" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183149 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fbb5c1c-5b96-4563-be16-83f73dece6aa" containerName="registry-server" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183155 4849 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="2fbb5c1c-5b96-4563-be16-83f73dece6aa" containerName="registry-server" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183163 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c329326-860d-4eed-855d-e7811ee41819" containerName="registry-server" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183168 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c329326-860d-4eed-855d-e7811ee41819" containerName="registry-server" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183177 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" containerName="extract-content" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183182 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" containerName="extract-content" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183191 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c329326-860d-4eed-855d-e7811ee41819" containerName="extract-content" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183195 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c329326-860d-4eed-855d-e7811ee41819" containerName="extract-content" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183204 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="975a1cda-589e-4583-a601-b2a1eba69a16" containerName="marketplace-operator" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183209 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="975a1cda-589e-4583-a601-b2a1eba69a16" containerName="marketplace-operator" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183217 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c329326-860d-4eed-855d-e7811ee41819" containerName="extract-utilities" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183222 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c329326-860d-4eed-855d-e7811ee41819" containerName="extract-utilities" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183227 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4454830a-59f9-4ece-8e5c-554b725015ec" containerName="extract-content" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183232 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="4454830a-59f9-4ece-8e5c-554b725015ec" containerName="extract-content" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183240 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fbb5c1c-5b96-4563-be16-83f73dece6aa" containerName="extract-content" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183244 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fbb5c1c-5b96-4563-be16-83f73dece6aa" containerName="extract-content" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183251 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" containerName="registry-server" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183257 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" containerName="registry-server" Dec 03 12:25:51 crc kubenswrapper[4849]: E1203 12:25:51.183264 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4454830a-59f9-4ece-8e5c-554b725015ec" containerName="extract-utilities" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 
12:25:51.183269 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="4454830a-59f9-4ece-8e5c-554b725015ec" containerName="extract-utilities" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183342 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="4454830a-59f9-4ece-8e5c-554b725015ec" containerName="registry-server" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183350 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7fd00d2-e826-4a3b-b65c-61d0fc1dbb60" containerName="registry-server" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183357 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="975a1cda-589e-4583-a601-b2a1eba69a16" containerName="marketplace-operator" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183364 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfa5e2db-9a31-40f0-90a5-a4f19c4a1413" containerName="route-controller-manager" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183372 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="27bd0071-f571-4860-b882-360fbb4fd181" containerName="controller-manager" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183379 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c329326-860d-4eed-855d-e7811ee41819" containerName="registry-server" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183385 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fbb5c1c-5b96-4563-be16-83f73dece6aa" containerName="registry-server" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.183673 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.185533 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.187137 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.187345 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.188920 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.189200 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.191277 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.195981 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw"] Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.196850 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.198592 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c6f7bc5d-n29jf"] Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.199661 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.199804 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.199895 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.200011 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.200397 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.200938 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.202067 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.208909 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw"] Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.261126 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkrjg\" (UniqueName: \"kubernetes.io/projected/0a56dac2-0f90-4495-8781-01dd86fce481-kube-api-access-pkrjg\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.261162 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a56dac2-0f90-4495-8781-01dd86fce481-serving-cert\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.261187 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-client-ca\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.261209 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-proxy-ca-bundles\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " 
pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.261232 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a8485d9b-fe87-41ab-b62e-82dce3136b32-serving-cert\") pod \"route-controller-manager-5b7c946dfc-47sbw\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.261247 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45jls\" (UniqueName: \"kubernetes.io/projected/a8485d9b-fe87-41ab-b62e-82dce3136b32-kube-api-access-45jls\") pod \"route-controller-manager-5b7c946dfc-47sbw\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.261263 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a8485d9b-fe87-41ab-b62e-82dce3136b32-client-ca\") pod \"route-controller-manager-5b7c946dfc-47sbw\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.261288 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-config\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.261313 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8485d9b-fe87-41ab-b62e-82dce3136b32-config\") pod \"route-controller-manager-5b7c946dfc-47sbw\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.361979 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-config\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.362027 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8485d9b-fe87-41ab-b62e-82dce3136b32-config\") pod \"route-controller-manager-5b7c946dfc-47sbw\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.362048 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkrjg\" (UniqueName: \"kubernetes.io/projected/0a56dac2-0f90-4495-8781-01dd86fce481-kube-api-access-pkrjg\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " 
pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.362068 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a56dac2-0f90-4495-8781-01dd86fce481-serving-cert\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.362090 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-client-ca\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.362122 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-proxy-ca-bundles\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.362200 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a8485d9b-fe87-41ab-b62e-82dce3136b32-serving-cert\") pod \"route-controller-manager-5b7c946dfc-47sbw\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.362227 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45jls\" (UniqueName: \"kubernetes.io/projected/a8485d9b-fe87-41ab-b62e-82dce3136b32-kube-api-access-45jls\") pod \"route-controller-manager-5b7c946dfc-47sbw\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.362244 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a8485d9b-fe87-41ab-b62e-82dce3136b32-client-ca\") pod \"route-controller-manager-5b7c946dfc-47sbw\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.363562 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-config\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.363800 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-proxy-ca-bundles\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.364038 4849 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-client-ca\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.364166 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a8485d9b-fe87-41ab-b62e-82dce3136b32-client-ca\") pod \"route-controller-manager-5b7c946dfc-47sbw\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.364196 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8485d9b-fe87-41ab-b62e-82dce3136b32-config\") pod \"route-controller-manager-5b7c946dfc-47sbw\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.372194 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a56dac2-0f90-4495-8781-01dd86fce481-serving-cert\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.372196 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a8485d9b-fe87-41ab-b62e-82dce3136b32-serving-cert\") pod \"route-controller-manager-5b7c946dfc-47sbw\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.377156 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45jls\" (UniqueName: \"kubernetes.io/projected/a8485d9b-fe87-41ab-b62e-82dce3136b32-kube-api-access-45jls\") pod \"route-controller-manager-5b7c946dfc-47sbw\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.390635 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkrjg\" (UniqueName: \"kubernetes.io/projected/0a56dac2-0f90-4495-8781-01dd86fce481-kube-api-access-pkrjg\") pod \"controller-manager-c6f7bc5d-n29jf\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.495151 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.509327 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.656555 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw"] Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.830521 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c6f7bc5d-n29jf"] Dec 03 12:25:51 crc kubenswrapper[4849]: W1203 12:25:51.835845 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a56dac2_0f90_4495_8781_01dd86fce481.slice/crio-5df653142648ed2088c49ea488b8c654d24c6dafa428027998037c5897e05f6e WatchSource:0}: Error finding container 5df653142648ed2088c49ea488b8c654d24c6dafa428027998037c5897e05f6e: Status 404 returned error can't find the container with id 5df653142648ed2088c49ea488b8c654d24c6dafa428027998037c5897e05f6e Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.862587 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27bd0071-f571-4860-b882-360fbb4fd181" path="/var/lib/kubelet/pods/27bd0071-f571-4860-b882-360fbb4fd181/volumes" Dec 03 12:25:51 crc kubenswrapper[4849]: I1203 12:25:51.863214 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfa5e2db-9a31-40f0-90a5-a4f19c4a1413" path="/var/lib/kubelet/pods/cfa5e2db-9a31-40f0-90a5-a4f19c4a1413/volumes" Dec 03 12:25:52 crc kubenswrapper[4849]: I1203 12:25:52.661874 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" event={"ID":"a8485d9b-fe87-41ab-b62e-82dce3136b32","Type":"ContainerStarted","Data":"48594fea44d4108899b1bb6277a087c81ed33d97de1bca53808d00f377c2e390"} Dec 03 12:25:52 crc kubenswrapper[4849]: I1203 12:25:52.662095 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:52 crc kubenswrapper[4849]: I1203 12:25:52.662108 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" event={"ID":"a8485d9b-fe87-41ab-b62e-82dce3136b32","Type":"ContainerStarted","Data":"d91fc1b913cf71cfb551e800cd480ad771dcf57aa43a7af5fbeebd5ceebb6737"} Dec 03 12:25:52 crc kubenswrapper[4849]: I1203 12:25:52.663418 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" event={"ID":"0a56dac2-0f90-4495-8781-01dd86fce481","Type":"ContainerStarted","Data":"3fc8df77dffd64e409b7e04c07ff2caf59c8909f60fa05d646f6cd82ba647033"} Dec 03 12:25:52 crc kubenswrapper[4849]: I1203 12:25:52.663453 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" event={"ID":"0a56dac2-0f90-4495-8781-01dd86fce481","Type":"ContainerStarted","Data":"5df653142648ed2088c49ea488b8c654d24c6dafa428027998037c5897e05f6e"} Dec 03 12:25:52 crc kubenswrapper[4849]: I1203 12:25:52.663613 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:52 crc kubenswrapper[4849]: I1203 12:25:52.666129 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:25:52 crc kubenswrapper[4849]: I1203 12:25:52.666932 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:25:52 crc kubenswrapper[4849]: I1203 12:25:52.672440 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" podStartSLOduration=1.672430841 podStartE2EDuration="1.672430841s" podCreationTimestamp="2025-12-03 12:25:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:25:52.671982299 +0000 UTC m=+299.133830083" watchObservedRunningTime="2025-12-03 12:25:52.672430841 +0000 UTC m=+299.134278624" Dec 03 12:25:52 crc kubenswrapper[4849]: I1203 12:25:52.681858 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" podStartSLOduration=1.681846629 podStartE2EDuration="1.681846629s" podCreationTimestamp="2025-12-03 12:25:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:25:52.680968422 +0000 UTC m=+299.142816215" watchObservedRunningTime="2025-12-03 12:25:52.681846629 +0000 UTC m=+299.143694413" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.485965 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4"] Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.486840 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.489438 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"telemetry-config" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.489474 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"cluster-monitoring-operator-dockercfg-wwt9l" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.489710 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"cluster-monitoring-operator-tls" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.489870 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"openshift-service-ca.crt" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.490607 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"kube-root-ca.crt" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.493791 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4"] Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.519479 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-config\" (UniqueName: \"kubernetes.io/configmap/bb7c63d4-1924-4be0-995f-e52386f1f5c3-telemetry-config\") pod \"cluster-monitoring-operator-6d5b84845-ff2h4\" (UID: \"bb7c63d4-1924-4be0-995f-e52386f1f5c3\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.519578 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-monitoring-operator-tls\" (UniqueName: \"kubernetes.io/secret/bb7c63d4-1924-4be0-995f-e52386f1f5c3-cluster-monitoring-operator-tls\") pod \"cluster-monitoring-operator-6d5b84845-ff2h4\" (UID: \"bb7c63d4-1924-4be0-995f-e52386f1f5c3\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.519685 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7gz8\" (UniqueName: \"kubernetes.io/projected/bb7c63d4-1924-4be0-995f-e52386f1f5c3-kube-api-access-w7gz8\") pod \"cluster-monitoring-operator-6d5b84845-ff2h4\" (UID: \"bb7c63d4-1924-4be0-995f-e52386f1f5c3\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.620819 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-monitoring-operator-tls\" (UniqueName: \"kubernetes.io/secret/bb7c63d4-1924-4be0-995f-e52386f1f5c3-cluster-monitoring-operator-tls\") pod \"cluster-monitoring-operator-6d5b84845-ff2h4\" (UID: \"bb7c63d4-1924-4be0-995f-e52386f1f5c3\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.620873 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7gz8\" (UniqueName: \"kubernetes.io/projected/bb7c63d4-1924-4be0-995f-e52386f1f5c3-kube-api-access-w7gz8\") pod \"cluster-monitoring-operator-6d5b84845-ff2h4\" (UID: \"bb7c63d4-1924-4be0-995f-e52386f1f5c3\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 
12:25:56.620897 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-config\" (UniqueName: \"kubernetes.io/configmap/bb7c63d4-1924-4be0-995f-e52386f1f5c3-telemetry-config\") pod \"cluster-monitoring-operator-6d5b84845-ff2h4\" (UID: \"bb7c63d4-1924-4be0-995f-e52386f1f5c3\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.621712 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-config\" (UniqueName: \"kubernetes.io/configmap/bb7c63d4-1924-4be0-995f-e52386f1f5c3-telemetry-config\") pod \"cluster-monitoring-operator-6d5b84845-ff2h4\" (UID: \"bb7c63d4-1924-4be0-995f-e52386f1f5c3\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.625324 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-monitoring-operator-tls\" (UniqueName: \"kubernetes.io/secret/bb7c63d4-1924-4be0-995f-e52386f1f5c3-cluster-monitoring-operator-tls\") pod \"cluster-monitoring-operator-6d5b84845-ff2h4\" (UID: \"bb7c63d4-1924-4be0-995f-e52386f1f5c3\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.633335 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7gz8\" (UniqueName: \"kubernetes.io/projected/bb7c63d4-1924-4be0-995f-e52386f1f5c3-kube-api-access-w7gz8\") pod \"cluster-monitoring-operator-6d5b84845-ff2h4\" (UID: \"bb7c63d4-1924-4be0-995f-e52386f1f5c3\") " pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" Dec 03 12:25:56 crc kubenswrapper[4849]: I1203 12:25:56.803657 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.147949 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4"] Dec 03 12:25:57 crc kubenswrapper[4849]: W1203 12:25:57.152708 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb7c63d4_1924_4be0_995f_e52386f1f5c3.slice/crio-b1b61060a2033bdd3ccf54ba2b185537c27a8433ce63ca3876362bab9508415d WatchSource:0}: Error finding container b1b61060a2033bdd3ccf54ba2b185537c27a8433ce63ca3876362bab9508415d: Status 404 returned error can't find the container with id b1b61060a2033bdd3ccf54ba2b185537c27a8433ce63ca3876362bab9508415d Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.343669 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8fhxw"] Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.344477 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.345816 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.352250 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8fhxw"] Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.431422 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a98469a9-b6c8-4fa3-a639-89059d0a4de8-utilities\") pod \"certified-operators-8fhxw\" (UID: \"a98469a9-b6c8-4fa3-a639-89059d0a4de8\") " pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.431500 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8glj4\" (UniqueName: \"kubernetes.io/projected/a98469a9-b6c8-4fa3-a639-89059d0a4de8-kube-api-access-8glj4\") pod \"certified-operators-8fhxw\" (UID: \"a98469a9-b6c8-4fa3-a639-89059d0a4de8\") " pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.431521 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a98469a9-b6c8-4fa3-a639-89059d0a4de8-catalog-content\") pod \"certified-operators-8fhxw\" (UID: \"a98469a9-b6c8-4fa3-a639-89059d0a4de8\") " pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.532754 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a98469a9-b6c8-4fa3-a639-89059d0a4de8-utilities\") pod \"certified-operators-8fhxw\" (UID: \"a98469a9-b6c8-4fa3-a639-89059d0a4de8\") " pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.533190 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a98469a9-b6c8-4fa3-a639-89059d0a4de8-utilities\") pod \"certified-operators-8fhxw\" (UID: \"a98469a9-b6c8-4fa3-a639-89059d0a4de8\") " pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.533470 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8glj4\" (UniqueName: \"kubernetes.io/projected/a98469a9-b6c8-4fa3-a639-89059d0a4de8-kube-api-access-8glj4\") pod \"certified-operators-8fhxw\" (UID: \"a98469a9-b6c8-4fa3-a639-89059d0a4de8\") " pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.533610 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a98469a9-b6c8-4fa3-a639-89059d0a4de8-catalog-content\") pod \"certified-operators-8fhxw\" (UID: \"a98469a9-b6c8-4fa3-a639-89059d0a4de8\") " pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.533851 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a98469a9-b6c8-4fa3-a639-89059d0a4de8-catalog-content\") pod \"certified-operators-8fhxw\" (UID: 
\"a98469a9-b6c8-4fa3-a639-89059d0a4de8\") " pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.541344 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dr9hn"] Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.542139 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.543713 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.547787 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dr9hn"] Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.549760 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8glj4\" (UniqueName: \"kubernetes.io/projected/a98469a9-b6c8-4fa3-a639-89059d0a4de8-kube-api-access-8glj4\") pod \"certified-operators-8fhxw\" (UID: \"a98469a9-b6c8-4fa3-a639-89059d0a4de8\") " pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.634628 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f23f72f-8e85-4f2c-91ee-b11942536d44-utilities\") pod \"community-operators-dr9hn\" (UID: \"8f23f72f-8e85-4f2c-91ee-b11942536d44\") " pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.634837 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f23f72f-8e85-4f2c-91ee-b11942536d44-catalog-content\") pod \"community-operators-dr9hn\" (UID: \"8f23f72f-8e85-4f2c-91ee-b11942536d44\") " pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.634906 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9g2h9\" (UniqueName: \"kubernetes.io/projected/8f23f72f-8e85-4f2c-91ee-b11942536d44-kube-api-access-9g2h9\") pod \"community-operators-dr9hn\" (UID: \"8f23f72f-8e85-4f2c-91ee-b11942536d44\") " pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.655478 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.680106 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" event={"ID":"bb7c63d4-1924-4be0-995f-e52386f1f5c3","Type":"ContainerStarted","Data":"b1b61060a2033bdd3ccf54ba2b185537c27a8433ce63ca3876362bab9508415d"} Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.736289 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9g2h9\" (UniqueName: \"kubernetes.io/projected/8f23f72f-8e85-4f2c-91ee-b11942536d44-kube-api-access-9g2h9\") pod \"community-operators-dr9hn\" (UID: \"8f23f72f-8e85-4f2c-91ee-b11942536d44\") " pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.736341 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f23f72f-8e85-4f2c-91ee-b11942536d44-utilities\") pod \"community-operators-dr9hn\" (UID: \"8f23f72f-8e85-4f2c-91ee-b11942536d44\") " pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.736423 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f23f72f-8e85-4f2c-91ee-b11942536d44-catalog-content\") pod \"community-operators-dr9hn\" (UID: \"8f23f72f-8e85-4f2c-91ee-b11942536d44\") " pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.737084 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f23f72f-8e85-4f2c-91ee-b11942536d44-catalog-content\") pod \"community-operators-dr9hn\" (UID: \"8f23f72f-8e85-4f2c-91ee-b11942536d44\") " pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.737311 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f23f72f-8e85-4f2c-91ee-b11942536d44-utilities\") pod \"community-operators-dr9hn\" (UID: \"8f23f72f-8e85-4f2c-91ee-b11942536d44\") " pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.750476 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9g2h9\" (UniqueName: \"kubernetes.io/projected/8f23f72f-8e85-4f2c-91ee-b11942536d44-kube-api-access-9g2h9\") pod \"community-operators-dr9hn\" (UID: \"8f23f72f-8e85-4f2c-91ee-b11942536d44\") " pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.852697 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:25:57 crc kubenswrapper[4849]: I1203 12:25:57.989505 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8fhxw"] Dec 03 12:25:57 crc kubenswrapper[4849]: W1203 12:25:57.994362 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda98469a9_b6c8_4fa3_a639_89059d0a4de8.slice/crio-dd8d1a6b2a979fa7aa532c436038b05feb14262c3c75d6f4badf980d53d960d9 WatchSource:0}: Error finding container dd8d1a6b2a979fa7aa532c436038b05feb14262c3c75d6f4badf980d53d960d9: Status 404 returned error can't find the container with id dd8d1a6b2a979fa7aa532c436038b05feb14262c3c75d6f4badf980d53d960d9 Dec 03 12:25:58 crc kubenswrapper[4849]: I1203 12:25:58.180012 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dr9hn"] Dec 03 12:25:58 crc kubenswrapper[4849]: W1203 12:25:58.186039 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8f23f72f_8e85_4f2c_91ee_b11942536d44.slice/crio-1ca24741c0ffecb215c7a515c4689aa67bf271195b45e9963f6439a9b5865c8b WatchSource:0}: Error finding container 1ca24741c0ffecb215c7a515c4689aa67bf271195b45e9963f6439a9b5865c8b: Status 404 returned error can't find the container with id 1ca24741c0ffecb215c7a515c4689aa67bf271195b45e9963f6439a9b5865c8b Dec 03 12:25:58 crc kubenswrapper[4849]: I1203 12:25:58.685094 4849 generic.go:334] "Generic (PLEG): container finished" podID="8f23f72f-8e85-4f2c-91ee-b11942536d44" containerID="1984586083e2b4c1fec4e49d6fb8ae6baf5edebfd18569cbf511399933617019" exitCode=0 Dec 03 12:25:58 crc kubenswrapper[4849]: I1203 12:25:58.685173 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dr9hn" event={"ID":"8f23f72f-8e85-4f2c-91ee-b11942536d44","Type":"ContainerDied","Data":"1984586083e2b4c1fec4e49d6fb8ae6baf5edebfd18569cbf511399933617019"} Dec 03 12:25:58 crc kubenswrapper[4849]: I1203 12:25:58.685200 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dr9hn" event={"ID":"8f23f72f-8e85-4f2c-91ee-b11942536d44","Type":"ContainerStarted","Data":"1ca24741c0ffecb215c7a515c4689aa67bf271195b45e9963f6439a9b5865c8b"} Dec 03 12:25:58 crc kubenswrapper[4849]: I1203 12:25:58.687217 4849 generic.go:334] "Generic (PLEG): container finished" podID="a98469a9-b6c8-4fa3-a639-89059d0a4de8" containerID="2f97d0489de95a23a6797e1c49e75f4ea0db310a72618e40ef341b36ca067efd" exitCode=0 Dec 03 12:25:58 crc kubenswrapper[4849]: I1203 12:25:58.687244 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8fhxw" event={"ID":"a98469a9-b6c8-4fa3-a639-89059d0a4de8","Type":"ContainerDied","Data":"2f97d0489de95a23a6797e1c49e75f4ea0db310a72618e40ef341b36ca067efd"} Dec 03 12:25:58 crc kubenswrapper[4849]: I1203 12:25:58.687259 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8fhxw" event={"ID":"a98469a9-b6c8-4fa3-a639-89059d0a4de8","Type":"ContainerStarted","Data":"dd8d1a6b2a979fa7aa532c436038b05feb14262c3c75d6f4badf980d53d960d9"} Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.193274 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9"] Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 
12:25:59.194074 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.195475 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-admission-webhook-tls" Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.196461 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-admission-webhook-dockercfg-nssdq" Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.199633 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9"] Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.253861 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-srfn9\" (UID: \"9e2af5f8-eeef-4e12-8676-447a4431f9eb\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.354750 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-srfn9\" (UID: \"9e2af5f8-eeef-4e12-8676-447a4431f9eb\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:25:59 crc kubenswrapper[4849]: E1203 12:25:59.354893 4849 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:25:59 crc kubenswrapper[4849]: E1203 12:25:59.354969 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates podName:9e2af5f8-eeef-4e12-8676-447a4431f9eb nodeName:}" failed. No retries permitted until 2025-12-03 12:25:59.854939572 +0000 UTC m=+306.316787355 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-srfn9" (UID: "9e2af5f8-eeef-4e12-8676-447a4431f9eb") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.692222 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" event={"ID":"bb7c63d4-1924-4be0-995f-e52386f1f5c3","Type":"ContainerStarted","Data":"2432aa901fdfdbe028320c8849cc3c37152f016b6ec1ed809b306b86a1cbcc2e"} Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.693970 4849 generic.go:334] "Generic (PLEG): container finished" podID="8f23f72f-8e85-4f2c-91ee-b11942536d44" containerID="5781097e090a94b780917ae87e4756f046ed85105db7b082fcb93c4f9f95c288" exitCode=0 Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.694001 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dr9hn" event={"ID":"8f23f72f-8e85-4f2c-91ee-b11942536d44","Type":"ContainerDied","Data":"5781097e090a94b780917ae87e4756f046ed85105db7b082fcb93c4f9f95c288"} Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.695569 4849 generic.go:334] "Generic (PLEG): container finished" podID="a98469a9-b6c8-4fa3-a639-89059d0a4de8" containerID="3087cd45185e3c32f962316e0f47e7e19f186f19166c49131d8c0805a483331d" exitCode=0 Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.695613 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8fhxw" event={"ID":"a98469a9-b6c8-4fa3-a639-89059d0a4de8","Type":"ContainerDied","Data":"3087cd45185e3c32f962316e0f47e7e19f186f19166c49131d8c0805a483331d"} Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.707047 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/cluster-monitoring-operator-6d5b84845-ff2h4" podStartSLOduration=2.105535158 podStartE2EDuration="3.707035561s" podCreationTimestamp="2025-12-03 12:25:56 +0000 UTC" firstStartedPulling="2025-12-03 12:25:57.154568517 +0000 UTC m=+303.616416300" lastFinishedPulling="2025-12-03 12:25:58.756068919 +0000 UTC m=+305.217916703" observedRunningTime="2025-12-03 12:25:59.704974376 +0000 UTC m=+306.166822159" watchObservedRunningTime="2025-12-03 12:25:59.707035561 +0000 UTC m=+306.168883345" Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.859167 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-srfn9\" (UID: \"9e2af5f8-eeef-4e12-8676-447a4431f9eb\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:25:59 crc kubenswrapper[4849]: E1203 12:25:59.859291 4849 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:25:59 crc kubenswrapper[4849]: E1203 12:25:59.859338 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates podName:9e2af5f8-eeef-4e12-8676-447a4431f9eb nodeName:}" failed. No retries permitted until 2025-12-03 12:26:00.859326061 +0000 UTC m=+307.321173845 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-srfn9" (UID: "9e2af5f8-eeef-4e12-8676-447a4431f9eb") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.943211 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g7tp4"] Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.944050 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.945262 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.950377 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g7tp4"] Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.960065 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d412054f-4ed5-4275-bfed-e6f2160d41ee-utilities\") pod \"redhat-marketplace-g7tp4\" (UID: \"d412054f-4ed5-4275-bfed-e6f2160d41ee\") " pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.960128 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mljbk\" (UniqueName: \"kubernetes.io/projected/d412054f-4ed5-4275-bfed-e6f2160d41ee-kube-api-access-mljbk\") pod \"redhat-marketplace-g7tp4\" (UID: \"d412054f-4ed5-4275-bfed-e6f2160d41ee\") " pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:25:59 crc kubenswrapper[4849]: I1203 12:25:59.960252 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d412054f-4ed5-4275-bfed-e6f2160d41ee-catalog-content\") pod \"redhat-marketplace-g7tp4\" (UID: \"d412054f-4ed5-4275-bfed-e6f2160d41ee\") " pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.061584 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d412054f-4ed5-4275-bfed-e6f2160d41ee-catalog-content\") pod \"redhat-marketplace-g7tp4\" (UID: \"d412054f-4ed5-4275-bfed-e6f2160d41ee\") " pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.061667 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d412054f-4ed5-4275-bfed-e6f2160d41ee-utilities\") pod \"redhat-marketplace-g7tp4\" (UID: \"d412054f-4ed5-4275-bfed-e6f2160d41ee\") " pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.061719 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mljbk\" (UniqueName: \"kubernetes.io/projected/d412054f-4ed5-4275-bfed-e6f2160d41ee-kube-api-access-mljbk\") pod \"redhat-marketplace-g7tp4\" (UID: \"d412054f-4ed5-4275-bfed-e6f2160d41ee\") " pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.062033 4849 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d412054f-4ed5-4275-bfed-e6f2160d41ee-catalog-content\") pod \"redhat-marketplace-g7tp4\" (UID: \"d412054f-4ed5-4275-bfed-e6f2160d41ee\") " pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.062113 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d412054f-4ed5-4275-bfed-e6f2160d41ee-utilities\") pod \"redhat-marketplace-g7tp4\" (UID: \"d412054f-4ed5-4275-bfed-e6f2160d41ee\") " pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.077593 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mljbk\" (UniqueName: \"kubernetes.io/projected/d412054f-4ed5-4275-bfed-e6f2160d41ee-kube-api-access-mljbk\") pod \"redhat-marketplace-g7tp4\" (UID: \"d412054f-4ed5-4275-bfed-e6f2160d41ee\") " pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.142058 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6sw5z"] Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.142900 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.144193 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.151504 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6sw5z"] Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.162662 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnn2w\" (UniqueName: \"kubernetes.io/projected/d9b0e814-298c-4693-874f-6687f99b49ef-kube-api-access-qnn2w\") pod \"redhat-operators-6sw5z\" (UID: \"d9b0e814-298c-4693-874f-6687f99b49ef\") " pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.162800 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9b0e814-298c-4693-874f-6687f99b49ef-utilities\") pod \"redhat-operators-6sw5z\" (UID: \"d9b0e814-298c-4693-874f-6687f99b49ef\") " pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.162910 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9b0e814-298c-4693-874f-6687f99b49ef-catalog-content\") pod \"redhat-operators-6sw5z\" (UID: \"d9b0e814-298c-4693-874f-6687f99b49ef\") " pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.254587 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.263510 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9b0e814-298c-4693-874f-6687f99b49ef-utilities\") pod \"redhat-operators-6sw5z\" (UID: \"d9b0e814-298c-4693-874f-6687f99b49ef\") " pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.263741 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9b0e814-298c-4693-874f-6687f99b49ef-catalog-content\") pod \"redhat-operators-6sw5z\" (UID: \"d9b0e814-298c-4693-874f-6687f99b49ef\") " pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.263784 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnn2w\" (UniqueName: \"kubernetes.io/projected/d9b0e814-298c-4693-874f-6687f99b49ef-kube-api-access-qnn2w\") pod \"redhat-operators-6sw5z\" (UID: \"d9b0e814-298c-4693-874f-6687f99b49ef\") " pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.263907 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9b0e814-298c-4693-874f-6687f99b49ef-utilities\") pod \"redhat-operators-6sw5z\" (UID: \"d9b0e814-298c-4693-874f-6687f99b49ef\") " pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.264136 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9b0e814-298c-4693-874f-6687f99b49ef-catalog-content\") pod \"redhat-operators-6sw5z\" (UID: \"d9b0e814-298c-4693-874f-6687f99b49ef\") " pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.277370 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnn2w\" (UniqueName: \"kubernetes.io/projected/d9b0e814-298c-4693-874f-6687f99b49ef-kube-api-access-qnn2w\") pod \"redhat-operators-6sw5z\" (UID: \"d9b0e814-298c-4693-874f-6687f99b49ef\") " pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.453819 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.592125 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g7tp4"] Dec 03 12:26:00 crc kubenswrapper[4849]: W1203 12:26:00.597043 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd412054f_4ed5_4275_bfed_e6f2160d41ee.slice/crio-b9a16bd530322ec817528b930ac297b38afd097dce6f46f0c72142bf7fd27861 WatchSource:0}: Error finding container b9a16bd530322ec817528b930ac297b38afd097dce6f46f0c72142bf7fd27861: Status 404 returned error can't find the container with id b9a16bd530322ec817528b930ac297b38afd097dce6f46f0c72142bf7fd27861 Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.707108 4849 generic.go:334] "Generic (PLEG): container finished" podID="d412054f-4ed5-4275-bfed-e6f2160d41ee" containerID="cc708d8c0a3c98fb10c78cc5a6a0f9b5d2d3a36a788134018f2f7ca1db9fd7a4" exitCode=0 Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.707181 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g7tp4" event={"ID":"d412054f-4ed5-4275-bfed-e6f2160d41ee","Type":"ContainerDied","Data":"cc708d8c0a3c98fb10c78cc5a6a0f9b5d2d3a36a788134018f2f7ca1db9fd7a4"} Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.707204 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g7tp4" event={"ID":"d412054f-4ed5-4275-bfed-e6f2160d41ee","Type":"ContainerStarted","Data":"b9a16bd530322ec817528b930ac297b38afd097dce6f46f0c72142bf7fd27861"} Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.709237 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dr9hn" event={"ID":"8f23f72f-8e85-4f2c-91ee-b11942536d44","Type":"ContainerStarted","Data":"aa20f93da9d9b4c692788d71a72d54a604c37f3b7f4f71962de4ed1a6db72759"} Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.713344 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8fhxw" event={"ID":"a98469a9-b6c8-4fa3-a639-89059d0a4de8","Type":"ContainerStarted","Data":"5868c252aeec3b1213ebb82c633ec1e939dc51f7ff65310bb8dbbef65cfd7127"} Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.734126 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8fhxw" podStartSLOduration=2.297212362 podStartE2EDuration="3.734109482s" podCreationTimestamp="2025-12-03 12:25:57 +0000 UTC" firstStartedPulling="2025-12-03 12:25:58.733373056 +0000 UTC m=+305.195220839" lastFinishedPulling="2025-12-03 12:26:00.170270176 +0000 UTC m=+306.632117959" observedRunningTime="2025-12-03 12:26:00.731889909 +0000 UTC m=+307.193737692" watchObservedRunningTime="2025-12-03 12:26:00.734109482 +0000 UTC m=+307.195957265" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.746759 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dr9hn" podStartSLOduration=2.151528691 podStartE2EDuration="3.746744136s" podCreationTimestamp="2025-12-03 12:25:57 +0000 UTC" firstStartedPulling="2025-12-03 12:25:58.733517347 +0000 UTC m=+305.195365120" lastFinishedPulling="2025-12-03 12:26:00.328732782 +0000 UTC m=+306.790580565" observedRunningTime="2025-12-03 12:26:00.745192386 +0000 UTC m=+307.207040169" watchObservedRunningTime="2025-12-03 
12:26:00.746744136 +0000 UTC m=+307.208591919" Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.785197 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6sw5z"] Dec 03 12:26:00 crc kubenswrapper[4849]: W1203 12:26:00.789075 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9b0e814_298c_4693_874f_6687f99b49ef.slice/crio-a9530952119147fe21c7da913d04a020c814d9cf3c0223b9ef77c9d6dc2c819e WatchSource:0}: Error finding container a9530952119147fe21c7da913d04a020c814d9cf3c0223b9ef77c9d6dc2c819e: Status 404 returned error can't find the container with id a9530952119147fe21c7da913d04a020c814d9cf3c0223b9ef77c9d6dc2c819e Dec 03 12:26:00 crc kubenswrapper[4849]: I1203 12:26:00.871510 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-srfn9\" (UID: \"9e2af5f8-eeef-4e12-8676-447a4431f9eb\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:26:00 crc kubenswrapper[4849]: E1203 12:26:00.871683 4849 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:26:00 crc kubenswrapper[4849]: E1203 12:26:00.871870 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates podName:9e2af5f8-eeef-4e12-8676-447a4431f9eb nodeName:}" failed. No retries permitted until 2025-12-03 12:26:02.871854724 +0000 UTC m=+309.333702508 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-srfn9" (UID: "9e2af5f8-eeef-4e12-8676-447a4431f9eb") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:26:01 crc kubenswrapper[4849]: I1203 12:26:01.719229 4849 generic.go:334] "Generic (PLEG): container finished" podID="d412054f-4ed5-4275-bfed-e6f2160d41ee" containerID="14363a4eec3bb912a3b052ca97f936e32b3e068ee36f279368932dab4d6bfd30" exitCode=0 Dec 03 12:26:01 crc kubenswrapper[4849]: I1203 12:26:01.719286 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g7tp4" event={"ID":"d412054f-4ed5-4275-bfed-e6f2160d41ee","Type":"ContainerDied","Data":"14363a4eec3bb912a3b052ca97f936e32b3e068ee36f279368932dab4d6bfd30"} Dec 03 12:26:01 crc kubenswrapper[4849]: I1203 12:26:01.721516 4849 generic.go:334] "Generic (PLEG): container finished" podID="d9b0e814-298c-4693-874f-6687f99b49ef" containerID="5835ebba0940de26adc762e141f9dda313837b0fd36bf923ebb7407f08d47bee" exitCode=0 Dec 03 12:26:01 crc kubenswrapper[4849]: I1203 12:26:01.721606 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6sw5z" event={"ID":"d9b0e814-298c-4693-874f-6687f99b49ef","Type":"ContainerDied","Data":"5835ebba0940de26adc762e141f9dda313837b0fd36bf923ebb7407f08d47bee"} Dec 03 12:26:01 crc kubenswrapper[4849]: I1203 12:26:01.721637 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6sw5z" event={"ID":"d9b0e814-298c-4693-874f-6687f99b49ef","Type":"ContainerStarted","Data":"a9530952119147fe21c7da913d04a020c814d9cf3c0223b9ef77c9d6dc2c819e"} Dec 03 12:26:02 crc kubenswrapper[4849]: I1203 12:26:02.727623 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g7tp4" event={"ID":"d412054f-4ed5-4275-bfed-e6f2160d41ee","Type":"ContainerStarted","Data":"ce110dc8842e34bbab220028c3b3d8adc6055f40705fcf0d4a5b3797d15e370a"} Dec 03 12:26:02 crc kubenswrapper[4849]: I1203 12:26:02.729158 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6sw5z" event={"ID":"d9b0e814-298c-4693-874f-6687f99b49ef","Type":"ContainerStarted","Data":"c66c99e46ec2d8e93ee5c99d095d48ecf6a38a699d04eac6a8d924d060d9028a"} Dec 03 12:26:02 crc kubenswrapper[4849]: I1203 12:26:02.743281 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g7tp4" podStartSLOduration=2.24491543 podStartE2EDuration="3.743269135s" podCreationTimestamp="2025-12-03 12:25:59 +0000 UTC" firstStartedPulling="2025-12-03 12:26:00.708416287 +0000 UTC m=+307.170264070" lastFinishedPulling="2025-12-03 12:26:02.206769993 +0000 UTC m=+308.668617775" observedRunningTime="2025-12-03 12:26:02.741918753 +0000 UTC m=+309.203766535" watchObservedRunningTime="2025-12-03 12:26:02.743269135 +0000 UTC m=+309.205116918" Dec 03 12:26:02 crc kubenswrapper[4849]: I1203 12:26:02.891319 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-srfn9\" (UID: \"9e2af5f8-eeef-4e12-8676-447a4431f9eb\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:26:02 crc 
kubenswrapper[4849]: E1203 12:26:02.891480 4849 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:26:02 crc kubenswrapper[4849]: E1203 12:26:02.891550 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates podName:9e2af5f8-eeef-4e12-8676-447a4431f9eb nodeName:}" failed. No retries permitted until 2025-12-03 12:26:06.891534717 +0000 UTC m=+313.353382501 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-srfn9" (UID: "9e2af5f8-eeef-4e12-8676-447a4431f9eb") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:26:03 crc kubenswrapper[4849]: I1203 12:26:03.733918 4849 generic.go:334] "Generic (PLEG): container finished" podID="d9b0e814-298c-4693-874f-6687f99b49ef" containerID="c66c99e46ec2d8e93ee5c99d095d48ecf6a38a699d04eac6a8d924d060d9028a" exitCode=0 Dec 03 12:26:03 crc kubenswrapper[4849]: I1203 12:26:03.734005 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6sw5z" event={"ID":"d9b0e814-298c-4693-874f-6687f99b49ef","Type":"ContainerDied","Data":"c66c99e46ec2d8e93ee5c99d095d48ecf6a38a699d04eac6a8d924d060d9028a"} Dec 03 12:26:04 crc kubenswrapper[4849]: I1203 12:26:04.739758 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6sw5z" event={"ID":"d9b0e814-298c-4693-874f-6687f99b49ef","Type":"ContainerStarted","Data":"e102f848468f528f2e0a264fa0049c2810f9df31cae2deed765cb2cda23dbb52"} Dec 03 12:26:04 crc kubenswrapper[4849]: I1203 12:26:04.750828 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6sw5z" podStartSLOduration=2.239028639 podStartE2EDuration="4.750813849s" podCreationTimestamp="2025-12-03 12:26:00 +0000 UTC" firstStartedPulling="2025-12-03 12:26:01.722439945 +0000 UTC m=+308.184287729" lastFinishedPulling="2025-12-03 12:26:04.234225155 +0000 UTC m=+310.696072939" observedRunningTime="2025-12-03 12:26:04.749880348 +0000 UTC m=+311.211728121" watchObservedRunningTime="2025-12-03 12:26:04.750813849 +0000 UTC m=+311.212661632" Dec 03 12:26:06 crc kubenswrapper[4849]: I1203 12:26:06.935077 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-srfn9\" (UID: \"9e2af5f8-eeef-4e12-8676-447a4431f9eb\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:26:06 crc kubenswrapper[4849]: E1203 12:26:06.935283 4849 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:26:06 crc kubenswrapper[4849]: E1203 12:26:06.935362 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates podName:9e2af5f8-eeef-4e12-8676-447a4431f9eb nodeName:}" failed. No retries permitted until 2025-12-03 12:26:14.935343675 +0000 UTC m=+321.397191468 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-srfn9" (UID: "9e2af5f8-eeef-4e12-8676-447a4431f9eb") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:26:07 crc kubenswrapper[4849]: I1203 12:26:07.655692 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:26:07 crc kubenswrapper[4849]: I1203 12:26:07.655743 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:26:07 crc kubenswrapper[4849]: I1203 12:26:07.683267 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:26:07 crc kubenswrapper[4849]: I1203 12:26:07.773287 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8fhxw" Dec 03 12:26:07 crc kubenswrapper[4849]: I1203 12:26:07.853358 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:26:07 crc kubenswrapper[4849]: I1203 12:26:07.853397 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:26:07 crc kubenswrapper[4849]: I1203 12:26:07.878472 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:26:08 crc kubenswrapper[4849]: I1203 12:26:08.779432 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dr9hn" Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.363869 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw"] Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.364060 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" podUID="a8485d9b-fe87-41ab-b62e-82dce3136b32" containerName="route-controller-manager" containerID="cri-o://48594fea44d4108899b1bb6277a087c81ed33d97de1bca53808d00f377c2e390" gracePeriod=30 Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.748894 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.757731 4849 generic.go:334] "Generic (PLEG): container finished" podID="a8485d9b-fe87-41ab-b62e-82dce3136b32" containerID="48594fea44d4108899b1bb6277a087c81ed33d97de1bca53808d00f377c2e390" exitCode=0 Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.757767 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" event={"ID":"a8485d9b-fe87-41ab-b62e-82dce3136b32","Type":"ContainerDied","Data":"48594fea44d4108899b1bb6277a087c81ed33d97de1bca53808d00f377c2e390"} Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.757754 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.757791 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw" event={"ID":"a8485d9b-fe87-41ab-b62e-82dce3136b32","Type":"ContainerDied","Data":"d91fc1b913cf71cfb551e800cd480ad771dcf57aa43a7af5fbeebd5ceebb6737"} Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.757806 4849 scope.go:117] "RemoveContainer" containerID="48594fea44d4108899b1bb6277a087c81ed33d97de1bca53808d00f377c2e390" Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.766050 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8485d9b-fe87-41ab-b62e-82dce3136b32-config\") pod \"a8485d9b-fe87-41ab-b62e-82dce3136b32\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.766099 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45jls\" (UniqueName: \"kubernetes.io/projected/a8485d9b-fe87-41ab-b62e-82dce3136b32-kube-api-access-45jls\") pod \"a8485d9b-fe87-41ab-b62e-82dce3136b32\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.766172 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a8485d9b-fe87-41ab-b62e-82dce3136b32-serving-cert\") pod \"a8485d9b-fe87-41ab-b62e-82dce3136b32\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.766202 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a8485d9b-fe87-41ab-b62e-82dce3136b32-client-ca\") pod \"a8485d9b-fe87-41ab-b62e-82dce3136b32\" (UID: \"a8485d9b-fe87-41ab-b62e-82dce3136b32\") " Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.766806 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8485d9b-fe87-41ab-b62e-82dce3136b32-client-ca" (OuterVolumeSpecName: "client-ca") pod "a8485d9b-fe87-41ab-b62e-82dce3136b32" (UID: "a8485d9b-fe87-41ab-b62e-82dce3136b32"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.766855 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8485d9b-fe87-41ab-b62e-82dce3136b32-config" (OuterVolumeSpecName: "config") pod "a8485d9b-fe87-41ab-b62e-82dce3136b32" (UID: "a8485d9b-fe87-41ab-b62e-82dce3136b32"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.769634 4849 scope.go:117] "RemoveContainer" containerID="48594fea44d4108899b1bb6277a087c81ed33d97de1bca53808d00f377c2e390" Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.770148 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8485d9b-fe87-41ab-b62e-82dce3136b32-kube-api-access-45jls" (OuterVolumeSpecName: "kube-api-access-45jls") pod "a8485d9b-fe87-41ab-b62e-82dce3136b32" (UID: "a8485d9b-fe87-41ab-b62e-82dce3136b32"). InnerVolumeSpecName "kube-api-access-45jls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.770888 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8485d9b-fe87-41ab-b62e-82dce3136b32-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a8485d9b-fe87-41ab-b62e-82dce3136b32" (UID: "a8485d9b-fe87-41ab-b62e-82dce3136b32"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:26:09 crc kubenswrapper[4849]: E1203 12:26:09.772572 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48594fea44d4108899b1bb6277a087c81ed33d97de1bca53808d00f377c2e390\": container with ID starting with 48594fea44d4108899b1bb6277a087c81ed33d97de1bca53808d00f377c2e390 not found: ID does not exist" containerID="48594fea44d4108899b1bb6277a087c81ed33d97de1bca53808d00f377c2e390" Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.772609 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48594fea44d4108899b1bb6277a087c81ed33d97de1bca53808d00f377c2e390"} err="failed to get container status \"48594fea44d4108899b1bb6277a087c81ed33d97de1bca53808d00f377c2e390\": rpc error: code = NotFound desc = could not find container \"48594fea44d4108899b1bb6277a087c81ed33d97de1bca53808d00f377c2e390\": container with ID starting with 48594fea44d4108899b1bb6277a087c81ed33d97de1bca53808d00f377c2e390 not found: ID does not exist" Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.867194 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a8485d9b-fe87-41ab-b62e-82dce3136b32-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.867220 4849 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a8485d9b-fe87-41ab-b62e-82dce3136b32-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.867229 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8485d9b-fe87-41ab-b62e-82dce3136b32-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:26:09 crc kubenswrapper[4849]: I1203 12:26:09.867237 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45jls\" (UniqueName: \"kubernetes.io/projected/a8485d9b-fe87-41ab-b62e-82dce3136b32-kube-api-access-45jls\") on node \"crc\" DevicePath \"\"" Dec 03 12:26:10 crc kubenswrapper[4849]: I1203 12:26:10.073962 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw"] Dec 03 12:26:10 crc kubenswrapper[4849]: I1203 12:26:10.078355 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b7c946dfc-47sbw"] Dec 03 12:26:10 crc kubenswrapper[4849]: I1203 12:26:10.255054 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:26:10 crc kubenswrapper[4849]: I1203 12:26:10.255120 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:26:10 crc kubenswrapper[4849]: I1203 12:26:10.282119 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:26:10 crc kubenswrapper[4849]: I1203 12:26:10.453945 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:10 crc kubenswrapper[4849]: I1203 12:26:10.454029 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:10 crc kubenswrapper[4849]: I1203 12:26:10.479991 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:10 crc kubenswrapper[4849]: I1203 12:26:10.787147 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6sw5z" Dec 03 12:26:10 crc kubenswrapper[4849]: I1203 12:26:10.788145 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g7tp4" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.320201 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw"] Dec 03 12:26:11 crc kubenswrapper[4849]: E1203 12:26:11.320377 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8485d9b-fe87-41ab-b62e-82dce3136b32" containerName="route-controller-manager" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.320390 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8485d9b-fe87-41ab-b62e-82dce3136b32" containerName="route-controller-manager" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.320482 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8485d9b-fe87-41ab-b62e-82dce3136b32" containerName="route-controller-manager" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.321018 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.322335 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.323031 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.323045 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.323234 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.323234 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.323237 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.330164 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw"] Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.385382 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e0fde94-f746-429b-a3f4-9d63f94c244d-serving-cert\") pod \"route-controller-manager-5ffcf8c575-2cmhw\" (UID: \"4e0fde94-f746-429b-a3f4-9d63f94c244d\") " pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.385563 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e0fde94-f746-429b-a3f4-9d63f94c244d-config\") pod \"route-controller-manager-5ffcf8c575-2cmhw\" (UID: \"4e0fde94-f746-429b-a3f4-9d63f94c244d\") " pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.385690 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmhxc\" (UniqueName: \"kubernetes.io/projected/4e0fde94-f746-429b-a3f4-9d63f94c244d-kube-api-access-bmhxc\") pod \"route-controller-manager-5ffcf8c575-2cmhw\" (UID: \"4e0fde94-f746-429b-a3f4-9d63f94c244d\") " pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.385815 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e0fde94-f746-429b-a3f4-9d63f94c244d-client-ca\") pod \"route-controller-manager-5ffcf8c575-2cmhw\" (UID: \"4e0fde94-f746-429b-a3f4-9d63f94c244d\") " pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.486669 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e0fde94-f746-429b-a3f4-9d63f94c244d-config\") pod 
\"route-controller-manager-5ffcf8c575-2cmhw\" (UID: \"4e0fde94-f746-429b-a3f4-9d63f94c244d\") " pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.486714 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmhxc\" (UniqueName: \"kubernetes.io/projected/4e0fde94-f746-429b-a3f4-9d63f94c244d-kube-api-access-bmhxc\") pod \"route-controller-manager-5ffcf8c575-2cmhw\" (UID: \"4e0fde94-f746-429b-a3f4-9d63f94c244d\") " pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.486753 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e0fde94-f746-429b-a3f4-9d63f94c244d-client-ca\") pod \"route-controller-manager-5ffcf8c575-2cmhw\" (UID: \"4e0fde94-f746-429b-a3f4-9d63f94c244d\") " pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.486780 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e0fde94-f746-429b-a3f4-9d63f94c244d-serving-cert\") pod \"route-controller-manager-5ffcf8c575-2cmhw\" (UID: \"4e0fde94-f746-429b-a3f4-9d63f94c244d\") " pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.487749 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e0fde94-f746-429b-a3f4-9d63f94c244d-client-ca\") pod \"route-controller-manager-5ffcf8c575-2cmhw\" (UID: \"4e0fde94-f746-429b-a3f4-9d63f94c244d\") " pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.487816 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e0fde94-f746-429b-a3f4-9d63f94c244d-config\") pod \"route-controller-manager-5ffcf8c575-2cmhw\" (UID: \"4e0fde94-f746-429b-a3f4-9d63f94c244d\") " pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.492156 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e0fde94-f746-429b-a3f4-9d63f94c244d-serving-cert\") pod \"route-controller-manager-5ffcf8c575-2cmhw\" (UID: \"4e0fde94-f746-429b-a3f4-9d63f94c244d\") " pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.499720 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmhxc\" (UniqueName: \"kubernetes.io/projected/4e0fde94-f746-429b-a3f4-9d63f94c244d-kube-api-access-bmhxc\") pod \"route-controller-manager-5ffcf8c575-2cmhw\" (UID: \"4e0fde94-f746-429b-a3f4-9d63f94c244d\") " pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.632078 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.862039 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8485d9b-fe87-41ab-b62e-82dce3136b32" path="/var/lib/kubelet/pods/a8485d9b-fe87-41ab-b62e-82dce3136b32/volumes" Dec 03 12:26:11 crc kubenswrapper[4849]: I1203 12:26:11.958192 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw"] Dec 03 12:26:11 crc kubenswrapper[4849]: W1203 12:26:11.962087 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4e0fde94_f746_429b_a3f4_9d63f94c244d.slice/crio-b2f0ab100e1c534d0bd0571bf9cdb96d13e3df825a7188a156b5e5a4fe66dedd WatchSource:0}: Error finding container b2f0ab100e1c534d0bd0571bf9cdb96d13e3df825a7188a156b5e5a4fe66dedd: Status 404 returned error can't find the container with id b2f0ab100e1c534d0bd0571bf9cdb96d13e3df825a7188a156b5e5a4fe66dedd Dec 03 12:26:12 crc kubenswrapper[4849]: I1203 12:26:12.772170 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" event={"ID":"4e0fde94-f746-429b-a3f4-9d63f94c244d","Type":"ContainerStarted","Data":"cf3cdf12c7964585c274c29ce6c94d3031e6c289588c4c670136049ac662c532"} Dec 03 12:26:12 crc kubenswrapper[4849]: I1203 12:26:12.772398 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" event={"ID":"4e0fde94-f746-429b-a3f4-9d63f94c244d","Type":"ContainerStarted","Data":"b2f0ab100e1c534d0bd0571bf9cdb96d13e3df825a7188a156b5e5a4fe66dedd"} Dec 03 12:26:12 crc kubenswrapper[4849]: I1203 12:26:12.787310 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" podStartSLOduration=3.787298912 podStartE2EDuration="3.787298912s" podCreationTimestamp="2025-12-03 12:26:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:26:12.783996179 +0000 UTC m=+319.245843962" watchObservedRunningTime="2025-12-03 12:26:12.787298912 +0000 UTC m=+319.249146696" Dec 03 12:26:13 crc kubenswrapper[4849]: I1203 12:26:13.775505 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:13 crc kubenswrapper[4849]: I1203 12:26:13.778941 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5ffcf8c575-2cmhw" Dec 03 12:26:15 crc kubenswrapper[4849]: I1203 12:26:15.027764 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-srfn9\" (UID: \"9e2af5f8-eeef-4e12-8676-447a4431f9eb\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:26:15 crc kubenswrapper[4849]: E1203 12:26:15.027936 4849 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:26:15 crc 
kubenswrapper[4849]: E1203 12:26:15.028321 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates podName:9e2af5f8-eeef-4e12-8676-447a4431f9eb nodeName:}" failed. No retries permitted until 2025-12-03 12:26:31.028305863 +0000 UTC m=+337.490153646 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-srfn9" (UID: "9e2af5f8-eeef-4e12-8676-447a4431f9eb") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:26:22 crc kubenswrapper[4849]: I1203 12:26:22.677214 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:26:22 crc kubenswrapper[4849]: I1203 12:26:22.677586 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.364545 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-c6f7bc5d-n29jf"] Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.364894 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" podUID="0a56dac2-0f90-4495-8781-01dd86fce481" containerName="controller-manager" containerID="cri-o://3fc8df77dffd64e409b7e04c07ff2caf59c8909f60fa05d646f6cd82ba647033" gracePeriod=30 Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.722218 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.778268 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-proxy-ca-bundles\") pod \"0a56dac2-0f90-4495-8781-01dd86fce481\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.778306 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a56dac2-0f90-4495-8781-01dd86fce481-serving-cert\") pod \"0a56dac2-0f90-4495-8781-01dd86fce481\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.778324 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-config\") pod \"0a56dac2-0f90-4495-8781-01dd86fce481\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.778967 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "0a56dac2-0f90-4495-8781-01dd86fce481" (UID: "0a56dac2-0f90-4495-8781-01dd86fce481"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.779048 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-config" (OuterVolumeSpecName: "config") pod "0a56dac2-0f90-4495-8781-01dd86fce481" (UID: "0a56dac2-0f90-4495-8781-01dd86fce481"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.779080 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-client-ca\") pod \"0a56dac2-0f90-4495-8781-01dd86fce481\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.779175 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkrjg\" (UniqueName: \"kubernetes.io/projected/0a56dac2-0f90-4495-8781-01dd86fce481-kube-api-access-pkrjg\") pod \"0a56dac2-0f90-4495-8781-01dd86fce481\" (UID: \"0a56dac2-0f90-4495-8781-01dd86fce481\") " Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.779358 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-client-ca" (OuterVolumeSpecName: "client-ca") pod "0a56dac2-0f90-4495-8781-01dd86fce481" (UID: "0a56dac2-0f90-4495-8781-01dd86fce481"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.780051 4849 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.780070 4849 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.780082 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a56dac2-0f90-4495-8781-01dd86fce481-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.782435 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a56dac2-0f90-4495-8781-01dd86fce481-kube-api-access-pkrjg" (OuterVolumeSpecName: "kube-api-access-pkrjg") pod "0a56dac2-0f90-4495-8781-01dd86fce481" (UID: "0a56dac2-0f90-4495-8781-01dd86fce481"). InnerVolumeSpecName "kube-api-access-pkrjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.783035 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a56dac2-0f90-4495-8781-01dd86fce481-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0a56dac2-0f90-4495-8781-01dd86fce481" (UID: "0a56dac2-0f90-4495-8781-01dd86fce481"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.842307 4849 generic.go:334] "Generic (PLEG): container finished" podID="0a56dac2-0f90-4495-8781-01dd86fce481" containerID="3fc8df77dffd64e409b7e04c07ff2caf59c8909f60fa05d646f6cd82ba647033" exitCode=0 Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.842348 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" event={"ID":"0a56dac2-0f90-4495-8781-01dd86fce481","Type":"ContainerDied","Data":"3fc8df77dffd64e409b7e04c07ff2caf59c8909f60fa05d646f6cd82ba647033"} Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.842394 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" event={"ID":"0a56dac2-0f90-4495-8781-01dd86fce481","Type":"ContainerDied","Data":"5df653142648ed2088c49ea488b8c654d24c6dafa428027998037c5897e05f6e"} Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.842412 4849 scope.go:117] "RemoveContainer" containerID="3fc8df77dffd64e409b7e04c07ff2caf59c8909f60fa05d646f6cd82ba647033" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.842681 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-c6f7bc5d-n29jf" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.853327 4849 scope.go:117] "RemoveContainer" containerID="3fc8df77dffd64e409b7e04c07ff2caf59c8909f60fa05d646f6cd82ba647033" Dec 03 12:26:29 crc kubenswrapper[4849]: E1203 12:26:29.853624 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fc8df77dffd64e409b7e04c07ff2caf59c8909f60fa05d646f6cd82ba647033\": container with ID starting with 3fc8df77dffd64e409b7e04c07ff2caf59c8909f60fa05d646f6cd82ba647033 not found: ID does not exist" containerID="3fc8df77dffd64e409b7e04c07ff2caf59c8909f60fa05d646f6cd82ba647033" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.853682 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fc8df77dffd64e409b7e04c07ff2caf59c8909f60fa05d646f6cd82ba647033"} err="failed to get container status \"3fc8df77dffd64e409b7e04c07ff2caf59c8909f60fa05d646f6cd82ba647033\": rpc error: code = NotFound desc = could not find container \"3fc8df77dffd64e409b7e04c07ff2caf59c8909f60fa05d646f6cd82ba647033\": container with ID starting with 3fc8df77dffd64e409b7e04c07ff2caf59c8909f60fa05d646f6cd82ba647033 not found: ID does not exist" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.861623 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-c6f7bc5d-n29jf"] Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.864273 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-c6f7bc5d-n29jf"] Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.881184 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkrjg\" (UniqueName: \"kubernetes.io/projected/0a56dac2-0f90-4495-8781-01dd86fce481-kube-api-access-pkrjg\") on node \"crc\" DevicePath \"\"" Dec 03 12:26:29 crc kubenswrapper[4849]: I1203 12:26:29.881207 4849 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a56dac2-0f90-4495-8781-01dd86fce481-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.094149 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-srfn9\" (UID: \"9e2af5f8-eeef-4e12-8676-447a4431f9eb\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:26:31 crc kubenswrapper[4849]: E1203 12:26:31.094303 4849 secret.go:188] Couldn't get secret openshift-monitoring/prometheus-operator-admission-webhook-tls: secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:26:31 crc kubenswrapper[4849]: E1203 12:26:31.094410 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates podName:9e2af5f8-eeef-4e12-8676-447a4431f9eb nodeName:}" failed. No retries permitted until 2025-12-03 12:27:03.094395903 +0000 UTC m=+369.556243685 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "tls-certificates" (UniqueName: "kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates") pod "prometheus-operator-admission-webhook-f54c54754-srfn9" (UID: "9e2af5f8-eeef-4e12-8676-447a4431f9eb") : secret "prometheus-operator-admission-webhook-tls" not found Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.333500 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-fd796fc-xn78b"] Dec 03 12:26:31 crc kubenswrapper[4849]: E1203 12:26:31.333684 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a56dac2-0f90-4495-8781-01dd86fce481" containerName="controller-manager" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.333695 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a56dac2-0f90-4495-8781-01dd86fce481" containerName="controller-manager" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.333793 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a56dac2-0f90-4495-8781-01dd86fce481" containerName="controller-manager" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.334090 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.335835 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.336000 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.336047 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.336109 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.336141 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.337071 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.340666 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.341546 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-fd796fc-xn78b"] Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.398007 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/edf80d7f-72eb-43ac-a16e-53f01e835c19-serving-cert\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.398310 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv7cj\" (UniqueName: \"kubernetes.io/projected/edf80d7f-72eb-43ac-a16e-53f01e835c19-kube-api-access-lv7cj\") pod 
\"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.398438 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/edf80d7f-72eb-43ac-a16e-53f01e835c19-proxy-ca-bundles\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.398587 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edf80d7f-72eb-43ac-a16e-53f01e835c19-config\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.398686 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/edf80d7f-72eb-43ac-a16e-53f01e835c19-client-ca\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.499460 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv7cj\" (UniqueName: \"kubernetes.io/projected/edf80d7f-72eb-43ac-a16e-53f01e835c19-kube-api-access-lv7cj\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.499514 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/edf80d7f-72eb-43ac-a16e-53f01e835c19-proxy-ca-bundles\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.499559 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edf80d7f-72eb-43ac-a16e-53f01e835c19-config\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.499584 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/edf80d7f-72eb-43ac-a16e-53f01e835c19-client-ca\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.499618 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/edf80d7f-72eb-43ac-a16e-53f01e835c19-serving-cert\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 
12:26:31.500850 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/edf80d7f-72eb-43ac-a16e-53f01e835c19-client-ca\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.501059 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edf80d7f-72eb-43ac-a16e-53f01e835c19-config\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.501214 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/edf80d7f-72eb-43ac-a16e-53f01e835c19-proxy-ca-bundles\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.503493 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/edf80d7f-72eb-43ac-a16e-53f01e835c19-serving-cert\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.512106 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv7cj\" (UniqueName: \"kubernetes.io/projected/edf80d7f-72eb-43ac-a16e-53f01e835c19-kube-api-access-lv7cj\") pod \"controller-manager-fd796fc-xn78b\" (UID: \"edf80d7f-72eb-43ac-a16e-53f01e835c19\") " pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.646710 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.864385 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a56dac2-0f90-4495-8781-01dd86fce481" path="/var/lib/kubelet/pods/0a56dac2-0f90-4495-8781-01dd86fce481/volumes" Dec 03 12:26:31 crc kubenswrapper[4849]: I1203 12:26:31.996287 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-fd796fc-xn78b"] Dec 03 12:26:32 crc kubenswrapper[4849]: W1203 12:26:32.002683 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podedf80d7f_72eb_43ac_a16e_53f01e835c19.slice/crio-d0d6394f24cd0770172923605c56aecbad6bcdc066c39294a71b711d6b8168bf WatchSource:0}: Error finding container d0d6394f24cd0770172923605c56aecbad6bcdc066c39294a71b711d6b8168bf: Status 404 returned error can't find the container with id d0d6394f24cd0770172923605c56aecbad6bcdc066c39294a71b711d6b8168bf Dec 03 12:26:32 crc kubenswrapper[4849]: I1203 12:26:32.856307 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" event={"ID":"edf80d7f-72eb-43ac-a16e-53f01e835c19","Type":"ContainerStarted","Data":"96a63149b5276024a65118b02c0a6a5329e70d2ed896f63d0ad736c595e3802e"} Dec 03 12:26:32 crc kubenswrapper[4849]: I1203 12:26:32.856345 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" event={"ID":"edf80d7f-72eb-43ac-a16e-53f01e835c19","Type":"ContainerStarted","Data":"d0d6394f24cd0770172923605c56aecbad6bcdc066c39294a71b711d6b8168bf"} Dec 03 12:26:32 crc kubenswrapper[4849]: I1203 12:26:32.856574 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:32 crc kubenswrapper[4849]: I1203 12:26:32.859959 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" Dec 03 12:26:32 crc kubenswrapper[4849]: I1203 12:26:32.869947 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-fd796fc-xn78b" podStartSLOduration=3.869936323 podStartE2EDuration="3.869936323s" podCreationTimestamp="2025-12-03 12:26:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:26:32.868127472 +0000 UTC m=+339.329975255" watchObservedRunningTime="2025-12-03 12:26:32.869936323 +0000 UTC m=+339.331784106" Dec 03 12:26:34 crc kubenswrapper[4849]: I1203 12:26:34.844821 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-2gbl9"] Dec 03 12:26:34 crc kubenswrapper[4849]: I1203 12:26:34.845379 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:34 crc kubenswrapper[4849]: I1203 12:26:34.852586 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-2gbl9"] Dec 03 12:26:34 crc kubenswrapper[4849]: I1203 12:26:34.936292 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-ca-trust-extracted\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:34 crc kubenswrapper[4849]: I1203 12:26:34.936558 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pb77\" (UniqueName: \"kubernetes.io/projected/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-kube-api-access-7pb77\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:34 crc kubenswrapper[4849]: I1203 12:26:34.936577 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-trusted-ca\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:34 crc kubenswrapper[4849]: I1203 12:26:34.936614 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-installation-pull-secrets\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:34 crc kubenswrapper[4849]: I1203 12:26:34.936719 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-bound-sa-token\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:34 crc kubenswrapper[4849]: I1203 12:26:34.936911 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-registry-tls\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:34 crc kubenswrapper[4849]: I1203 12:26:34.937086 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:34 crc kubenswrapper[4849]: I1203 12:26:34.937132 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-registry-certificates\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:34 crc kubenswrapper[4849]: I1203 12:26:34.954221 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.037902 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pb77\" (UniqueName: \"kubernetes.io/projected/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-kube-api-access-7pb77\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.037951 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-trusted-ca\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.037999 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-installation-pull-secrets\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.038070 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-bound-sa-token\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.038093 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-registry-tls\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.038138 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-registry-certificates\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.038203 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-ca-trust-extracted\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.038609 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-ca-trust-extracted\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.039420 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-trusted-ca\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.039571 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-registry-certificates\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.042792 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-installation-pull-secrets\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.042883 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-registry-tls\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.051904 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pb77\" (UniqueName: \"kubernetes.io/projected/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-kube-api-access-7pb77\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.052011 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2d59297a-66ea-4d7e-ad78-4c2b3ead0029-bound-sa-token\") pod \"image-registry-66df7c8f76-2gbl9\" (UID: \"2d59297a-66ea-4d7e-ad78-4c2b3ead0029\") " pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.158058 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.490638 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-2gbl9"] Dec 03 12:26:35 crc kubenswrapper[4849]: W1203 12:26:35.496961 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d59297a_66ea_4d7e_ad78_4c2b3ead0029.slice/crio-5c541a0c1d679590f3be228c9a26ce5991c819f2a06d1d265f9c63b2d874acda WatchSource:0}: Error finding container 5c541a0c1d679590f3be228c9a26ce5991c819f2a06d1d265f9c63b2d874acda: Status 404 returned error can't find the container with id 5c541a0c1d679590f3be228c9a26ce5991c819f2a06d1d265f9c63b2d874acda Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.868367 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" event={"ID":"2d59297a-66ea-4d7e-ad78-4c2b3ead0029","Type":"ContainerStarted","Data":"c241a0c51871765077c10ae1d1a0b080d7c166b79e935a197459857124ada9fd"} Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.868403 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" event={"ID":"2d59297a-66ea-4d7e-ad78-4c2b3ead0029","Type":"ContainerStarted","Data":"5c541a0c1d679590f3be228c9a26ce5991c819f2a06d1d265f9c63b2d874acda"} Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.868499 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:35 crc kubenswrapper[4849]: I1203 12:26:35.883224 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" podStartSLOduration=1.883210861 podStartE2EDuration="1.883210861s" podCreationTimestamp="2025-12-03 12:26:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:26:35.881353769 +0000 UTC m=+342.343201552" watchObservedRunningTime="2025-12-03 12:26:35.883210861 +0000 UTC m=+342.345058644" Dec 03 12:26:52 crc kubenswrapper[4849]: I1203 12:26:52.677303 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:26:52 crc kubenswrapper[4849]: I1203 12:26:52.677614 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:26:55 crc kubenswrapper[4849]: I1203 12:26:55.162290 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-2gbl9" Dec 03 12:26:55 crc kubenswrapper[4849]: I1203 12:26:55.193446 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8lbfv"] Dec 03 12:27:03 crc kubenswrapper[4849]: I1203 12:27:03.155656 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-certificates\" 
(UniqueName: \"kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-srfn9\" (UID: \"9e2af5f8-eeef-4e12-8676-447a4431f9eb\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:27:03 crc kubenswrapper[4849]: I1203 12:27:03.160503 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-certificates\" (UniqueName: \"kubernetes.io/secret/9e2af5f8-eeef-4e12-8676-447a4431f9eb-tls-certificates\") pod \"prometheus-operator-admission-webhook-f54c54754-srfn9\" (UID: \"9e2af5f8-eeef-4e12-8676-447a4431f9eb\") " pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:27:03 crc kubenswrapper[4849]: I1203 12:27:03.409217 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-admission-webhook-dockercfg-nssdq" Dec 03 12:27:03 crc kubenswrapper[4849]: I1203 12:27:03.418519 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:27:03 crc kubenswrapper[4849]: I1203 12:27:03.746228 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9"] Dec 03 12:27:03 crc kubenswrapper[4849]: I1203 12:27:03.977993 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" event={"ID":"9e2af5f8-eeef-4e12-8676-447a4431f9eb","Type":"ContainerStarted","Data":"eebc241b63ca64dab0e333d9246aa2a266df847c7f628e4e05616bb394a9d3ec"} Dec 03 12:27:05 crc kubenswrapper[4849]: I1203 12:27:05.987280 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" event={"ID":"9e2af5f8-eeef-4e12-8676-447a4431f9eb","Type":"ContainerStarted","Data":"673819ba189ae87ed2e673567f9b400614c79101d344f1214b36ac561d4fbff9"} Dec 03 12:27:05 crc kubenswrapper[4849]: I1203 12:27:05.987505 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:27:05 crc kubenswrapper[4849]: I1203 12:27:05.991190 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" Dec 03 12:27:05 crc kubenswrapper[4849]: I1203 12:27:05.999162 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/prometheus-operator-admission-webhook-f54c54754-srfn9" podStartSLOduration=65.661454137 podStartE2EDuration="1m6.999151812s" podCreationTimestamp="2025-12-03 12:25:59 +0000 UTC" firstStartedPulling="2025-12-03 12:27:03.755018828 +0000 UTC m=+370.216866611" lastFinishedPulling="2025-12-03 12:27:05.092716503 +0000 UTC m=+371.554564286" observedRunningTime="2025-12-03 12:27:05.99748696 +0000 UTC m=+372.459334744" watchObservedRunningTime="2025-12-03 12:27:05.999151812 +0000 UTC m=+372.460999595" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.233538 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/prometheus-operator-db54df47d-rt24m"] Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.234259 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.236086 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-tls" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.236265 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-dockercfg-t8ghv" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.236401 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"metrics-client-ca" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.237170 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-operator-kube-rbac-proxy-config" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.242235 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-db54df47d-rt24m"] Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.292305 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/26f304b6-79dc-4802-8885-ed50613f0547-metrics-client-ca\") pod \"prometheus-operator-db54df47d-rt24m\" (UID: \"26f304b6-79dc-4802-8885-ed50613f0547\") " pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.292356 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgwfl\" (UniqueName: \"kubernetes.io/projected/26f304b6-79dc-4802-8885-ed50613f0547-kube-api-access-pgwfl\") pod \"prometheus-operator-db54df47d-rt24m\" (UID: \"26f304b6-79dc-4802-8885-ed50613f0547\") " pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.292466 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-operator-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/26f304b6-79dc-4802-8885-ed50613f0547-prometheus-operator-kube-rbac-proxy-config\") pod \"prometheus-operator-db54df47d-rt24m\" (UID: \"26f304b6-79dc-4802-8885-ed50613f0547\") " pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.292494 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-operator-tls\" (UniqueName: \"kubernetes.io/secret/26f304b6-79dc-4802-8885-ed50613f0547-prometheus-operator-tls\") pod \"prometheus-operator-db54df47d-rt24m\" (UID: \"26f304b6-79dc-4802-8885-ed50613f0547\") " pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.393163 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/26f304b6-79dc-4802-8885-ed50613f0547-metrics-client-ca\") pod \"prometheus-operator-db54df47d-rt24m\" (UID: \"26f304b6-79dc-4802-8885-ed50613f0547\") " pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.393216 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgwfl\" (UniqueName: \"kubernetes.io/projected/26f304b6-79dc-4802-8885-ed50613f0547-kube-api-access-pgwfl\") pod 
\"prometheus-operator-db54df47d-rt24m\" (UID: \"26f304b6-79dc-4802-8885-ed50613f0547\") " pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.393295 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-operator-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/26f304b6-79dc-4802-8885-ed50613f0547-prometheus-operator-kube-rbac-proxy-config\") pod \"prometheus-operator-db54df47d-rt24m\" (UID: \"26f304b6-79dc-4802-8885-ed50613f0547\") " pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.393318 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-operator-tls\" (UniqueName: \"kubernetes.io/secret/26f304b6-79dc-4802-8885-ed50613f0547-prometheus-operator-tls\") pod \"prometheus-operator-db54df47d-rt24m\" (UID: \"26f304b6-79dc-4802-8885-ed50613f0547\") " pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.394011 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/26f304b6-79dc-4802-8885-ed50613f0547-metrics-client-ca\") pod \"prometheus-operator-db54df47d-rt24m\" (UID: \"26f304b6-79dc-4802-8885-ed50613f0547\") " pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.397918 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-operator-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/26f304b6-79dc-4802-8885-ed50613f0547-prometheus-operator-kube-rbac-proxy-config\") pod \"prometheus-operator-db54df47d-rt24m\" (UID: \"26f304b6-79dc-4802-8885-ed50613f0547\") " pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.398033 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-operator-tls\" (UniqueName: \"kubernetes.io/secret/26f304b6-79dc-4802-8885-ed50613f0547-prometheus-operator-tls\") pod \"prometheus-operator-db54df47d-rt24m\" (UID: \"26f304b6-79dc-4802-8885-ed50613f0547\") " pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.406188 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgwfl\" (UniqueName: \"kubernetes.io/projected/26f304b6-79dc-4802-8885-ed50613f0547-kube-api-access-pgwfl\") pod \"prometheus-operator-db54df47d-rt24m\" (UID: \"26f304b6-79dc-4802-8885-ed50613f0547\") " pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.549407 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.880305 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-operator-db54df47d-rt24m"] Dec 03 12:27:06 crc kubenswrapper[4849]: I1203 12:27:06.991950 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" event={"ID":"26f304b6-79dc-4802-8885-ed50613f0547","Type":"ContainerStarted","Data":"ed03c4cc19d2ff2343b61171162eb2bca5f8ff6619f90b8d2563f755cebe28ca"} Dec 03 12:27:09 crc kubenswrapper[4849]: I1203 12:27:09.001074 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" event={"ID":"26f304b6-79dc-4802-8885-ed50613f0547","Type":"ContainerStarted","Data":"c02308c1cc64782aa41d43442d03718a9c4d19f0ee20bd912afaddb9dae4b560"} Dec 03 12:27:09 crc kubenswrapper[4849]: I1203 12:27:09.001254 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" event={"ID":"26f304b6-79dc-4802-8885-ed50613f0547","Type":"ContainerStarted","Data":"95aa2f67842e40e21592900c2eee4e03c8d07a047ec69697c455d3878b0bb26b"} Dec 03 12:27:09 crc kubenswrapper[4849]: I1203 12:27:09.013748 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/prometheus-operator-db54df47d-rt24m" podStartSLOduration=1.772065837 podStartE2EDuration="3.013734442s" podCreationTimestamp="2025-12-03 12:27:06 +0000 UTC" firstStartedPulling="2025-12-03 12:27:06.885637227 +0000 UTC m=+373.347485010" lastFinishedPulling="2025-12-03 12:27:08.127305832 +0000 UTC m=+374.589153615" observedRunningTime="2025-12-03 12:27:09.011537301 +0000 UTC m=+375.473385084" watchObservedRunningTime="2025-12-03 12:27:09.013734442 +0000 UTC m=+375.475582225" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.529628 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/openshift-state-metrics-566fddb674-526x6"] Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.530710 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.532994 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"openshift-state-metrics-kube-rbac-proxy-config" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.533277 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"openshift-state-metrics-dockercfg-l68jv" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.533304 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"openshift-state-metrics-tls" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.535759 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-metrics-client-ca\") pod \"openshift-state-metrics-566fddb674-526x6\" (UID: \"7551d5ce-b9eb-4a51-b2e6-337f39b52a92\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.535791 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvh8g\" (UniqueName: \"kubernetes.io/projected/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-kube-api-access-fvh8g\") pod \"openshift-state-metrics-566fddb674-526x6\" (UID: \"7551d5ce-b9eb-4a51-b2e6-337f39b52a92\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.535939 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-openshift-state-metrics-kube-rbac-proxy-config\") pod \"openshift-state-metrics-566fddb674-526x6\" (UID: \"7551d5ce-b9eb-4a51-b2e6-337f39b52a92\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.536013 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-openshift-state-metrics-tls\") pod \"openshift-state-metrics-566fddb674-526x6\" (UID: \"7551d5ce-b9eb-4a51-b2e6-337f39b52a92\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.545826 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/openshift-state-metrics-566fddb674-526x6"] Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.562175 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn"] Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.563179 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.565460 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-state-metrics-tls" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.565626 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-state-metrics-dockercfg-8rjg9" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.565901 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-state-metrics-kube-rbac-proxy-config" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.566039 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"kube-state-metrics-custom-resource-state-configmap" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.573042 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn"] Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.577022 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/node-exporter-bqhld"] Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.577870 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.579286 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"node-exporter-kube-rbac-proxy-config" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.579764 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"node-exporter-dockercfg-knllz" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.579887 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"node-exporter-tls" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.637043 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-openshift-state-metrics-kube-rbac-proxy-config\") pod \"openshift-state-metrics-566fddb674-526x6\" (UID: \"7551d5ce-b9eb-4a51-b2e6-337f39b52a92\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.637125 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-openshift-state-metrics-tls\") pod \"openshift-state-metrics-566fddb674-526x6\" (UID: \"7551d5ce-b9eb-4a51-b2e6-337f39b52a92\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.637170 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-metrics-client-ca\") pod \"openshift-state-metrics-566fddb674-526x6\" (UID: \"7551d5ce-b9eb-4a51-b2e6-337f39b52a92\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.637190 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvh8g\" (UniqueName: 
\"kubernetes.io/projected/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-kube-api-access-fvh8g\") pod \"openshift-state-metrics-566fddb674-526x6\" (UID: \"7551d5ce-b9eb-4a51-b2e6-337f39b52a92\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:10 crc kubenswrapper[4849]: E1203 12:27:10.637303 4849 secret.go:188] Couldn't get secret openshift-monitoring/openshift-state-metrics-tls: secret "openshift-state-metrics-tls" not found Dec 03 12:27:10 crc kubenswrapper[4849]: E1203 12:27:10.637426 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-openshift-state-metrics-tls podName:7551d5ce-b9eb-4a51-b2e6-337f39b52a92 nodeName:}" failed. No retries permitted until 2025-12-03 12:27:11.137411612 +0000 UTC m=+377.599259396 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "openshift-state-metrics-tls" (UniqueName: "kubernetes.io/secret/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-openshift-state-metrics-tls") pod "openshift-state-metrics-566fddb674-526x6" (UID: "7551d5ce-b9eb-4a51-b2e6-337f39b52a92") : secret "openshift-state-metrics-tls" not found Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.638115 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-metrics-client-ca\") pod \"openshift-state-metrics-566fddb674-526x6\" (UID: \"7551d5ce-b9eb-4a51-b2e6-337f39b52a92\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.642355 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-openshift-state-metrics-kube-rbac-proxy-config\") pod \"openshift-state-metrics-566fddb674-526x6\" (UID: \"7551d5ce-b9eb-4a51-b2e6-337f39b52a92\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.649257 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvh8g\" (UniqueName: \"kubernetes.io/projected/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-kube-api-access-fvh8g\") pod \"openshift-state-metrics-566fddb674-526x6\" (UID: \"7551d5ce-b9eb-4a51-b2e6-337f39b52a92\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.737855 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjmdx\" (UniqueName: \"kubernetes.io/projected/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-kube-api-access-hjmdx\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.737893 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hgp9\" (UniqueName: \"kubernetes.io/projected/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-kube-api-access-8hgp9\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.737919 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-textfile\" 
(UniqueName: \"kubernetes.io/empty-dir/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-node-exporter-textfile\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.737934 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"volume-directive-shadow\" (UniqueName: \"kubernetes.io/empty-dir/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-volume-directive-shadow\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.737948 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-metrics-client-ca\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.737971 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-wtmp\" (UniqueName: \"kubernetes.io/host-path/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-node-exporter-wtmp\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.738432 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-node-exporter-kube-rbac-proxy-config\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.738484 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-kube-state-metrics-kube-rbac-proxy-config\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.738531 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-metrics-client-ca\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.738558 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-custom-resource-state-configmap\" (UniqueName: \"kubernetes.io/configmap/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-kube-state-metrics-custom-resource-state-configmap\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.738656 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-exporter-tls\" 
(UniqueName: \"kubernetes.io/secret/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-node-exporter-tls\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.738701 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-kube-state-metrics-tls\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.738725 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-sys\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.738747 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"root\" (UniqueName: \"kubernetes.io/host-path/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-root\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.839698 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-kube-state-metrics-kube-rbac-proxy-config\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.839757 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-metrics-client-ca\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.839780 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-custom-resource-state-configmap\" (UniqueName: \"kubernetes.io/configmap/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-kube-state-metrics-custom-resource-state-configmap\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.839833 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-tls\" (UniqueName: \"kubernetes.io/secret/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-node-exporter-tls\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.839867 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-kube-state-metrics-tls\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " 
pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.839885 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-sys\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.839909 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"root\" (UniqueName: \"kubernetes.io/host-path/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-root\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.839960 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjmdx\" (UniqueName: \"kubernetes.io/projected/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-kube-api-access-hjmdx\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.839979 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hgp9\" (UniqueName: \"kubernetes.io/projected/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-kube-api-access-8hgp9\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.840003 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-textfile\" (UniqueName: \"kubernetes.io/empty-dir/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-node-exporter-textfile\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.840001 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-sys\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.840096 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"root\" (UniqueName: \"kubernetes.io/host-path/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-root\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.840020 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"volume-directive-shadow\" (UniqueName: \"kubernetes.io/empty-dir/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-volume-directive-shadow\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.840243 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-metrics-client-ca\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc 
kubenswrapper[4849]: I1203 12:27:10.840287 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-wtmp\" (UniqueName: \"kubernetes.io/host-path/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-node-exporter-wtmp\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.840324 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-exporter-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-node-exporter-kube-rbac-proxy-config\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.840442 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-wtmp\" (UniqueName: \"kubernetes.io/host-path/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-node-exporter-wtmp\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.840463 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"volume-directive-shadow\" (UniqueName: \"kubernetes.io/empty-dir/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-volume-directive-shadow\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.840474 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-textfile\" (UniqueName: \"kubernetes.io/empty-dir/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-node-exporter-textfile\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.840798 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-metrics-client-ca\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.840879 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-custom-resource-state-configmap\" (UniqueName: \"kubernetes.io/configmap/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-kube-state-metrics-custom-resource-state-configmap\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.840974 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-metrics-client-ca\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.842292 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-tls\" (UniqueName: \"kubernetes.io/secret/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-node-exporter-tls\") pod \"node-exporter-bqhld\" (UID: 
\"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.842305 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-kube-state-metrics-tls\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.843215 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-exporter-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-node-exporter-kube-rbac-proxy-config\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.843247 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-kube-rbac-proxy-config\" (UniqueName: \"kubernetes.io/secret/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-kube-state-metrics-kube-rbac-proxy-config\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.853678 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjmdx\" (UniqueName: \"kubernetes.io/projected/0777f8f5-ed89-42d1-9d8e-3d5d08b68abe-kube-api-access-hjmdx\") pod \"node-exporter-bqhld\" (UID: \"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe\") " pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.854221 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hgp9\" (UniqueName: \"kubernetes.io/projected/235021e1-0e3d-4ef4-87a7-ef1f93831c0f-kube-api-access-8hgp9\") pod \"kube-state-metrics-777cb5bd5d-n7hbn\" (UID: \"235021e1-0e3d-4ef4-87a7-ef1f93831c0f\") " pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.877082 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" Dec 03 12:27:10 crc kubenswrapper[4849]: I1203 12:27:10.888326 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/node-exporter-bqhld" Dec 03 12:27:10 crc kubenswrapper[4849]: W1203 12:27:10.906513 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0777f8f5_ed89_42d1_9d8e_3d5d08b68abe.slice/crio-a2a5c76c31c6bba216d9cea56deb2c88fb0b89e35ad418fedb45a03177150e57 WatchSource:0}: Error finding container a2a5c76c31c6bba216d9cea56deb2c88fb0b89e35ad418fedb45a03177150e57: Status 404 returned error can't find the container with id a2a5c76c31c6bba216d9cea56deb2c88fb0b89e35ad418fedb45a03177150e57 Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.009046 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-bqhld" event={"ID":"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe","Type":"ContainerStarted","Data":"a2a5c76c31c6bba216d9cea56deb2c88fb0b89e35ad418fedb45a03177150e57"} Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.142447 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-openshift-state-metrics-tls\") pod \"openshift-state-metrics-566fddb674-526x6\" (UID: \"7551d5ce-b9eb-4a51-b2e6-337f39b52a92\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.146674 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-state-metrics-tls\" (UniqueName: \"kubernetes.io/secret/7551d5ce-b9eb-4a51-b2e6-337f39b52a92-openshift-state-metrics-tls\") pod \"openshift-state-metrics-566fddb674-526x6\" (UID: \"7551d5ce-b9eb-4a51-b2e6-337f39b52a92\") " pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.225020 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn"] Dec 03 12:27:11 crc kubenswrapper[4849]: W1203 12:27:11.230031 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod235021e1_0e3d_4ef4_87a7_ef1f93831c0f.slice/crio-b59b166d5b3340c79c1084383627463dedfe23db2103a9068a04c87b4398ec78 WatchSource:0}: Error finding container b59b166d5b3340c79c1084383627463dedfe23db2103a9068a04c87b4398ec78: Status 404 returned error can't find the container with id b59b166d5b3340c79c1084383627463dedfe23db2103a9068a04c87b4398ec78 Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.442472 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.694676 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/alertmanager-main-0"] Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.696228 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.697836 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-tls-assets-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.697938 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-kube-rbac-proxy" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.698278 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-generated" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.698278 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-dockercfg-4bvkx" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.698322 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-kube-rbac-proxy-web" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.698798 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-kube-rbac-proxy-metric" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.701148 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-tls" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.701215 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"alertmanager-main-web-config" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.710139 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"alertmanager-trusted-ca-bundle" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.717566 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/alertmanager-main-0"] Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.813087 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/openshift-state-metrics-566fddb674-526x6"] Dec 03 12:27:11 crc kubenswrapper[4849]: W1203 12:27:11.820215 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7551d5ce_b9eb_4a51_b2e6_337f39b52a92.slice/crio-72ff3fecbddb87fc25a2eb1378bc18cd7986f6d4279e08e6d72ed433d510c033 WatchSource:0}: Error finding container 72ff3fecbddb87fc25a2eb1378bc18cd7986f6d4279e08e6d72ed433d510c033: Status 404 returned error can't find the container with id 72ff3fecbddb87fc25a2eb1378bc18cd7986f6d4279e08e6d72ed433d510c033 Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.849227 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-kube-rbac-proxy-web\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.849276 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-kube-rbac-proxy\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc 
kubenswrapper[4849]: I1203 12:27:11.849297 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-kube-rbac-proxy-metric\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-kube-rbac-proxy-metric\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.849332 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-main-db\" (UniqueName: \"kubernetes.io/empty-dir/d6a51fa4-023a-423a-9afa-df77b13b69f8-alertmanager-main-db\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.849350 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-alertmanager-main-tls\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-main-tls\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.849367 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-config-volume\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.849382 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d6a51fa4-023a-423a-9afa-df77b13b69f8-tls-assets\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.849406 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/d6a51fa4-023a-423a-9afa-df77b13b69f8-metrics-client-ca\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.849444 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6a51fa4-023a-423a-9afa-df77b13b69f8-alertmanager-trusted-ca-bundle\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.849464 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-web-config\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.849502 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: 
\"kubernetes.io/empty-dir/d6a51fa4-023a-423a-9afa-df77b13b69f8-config-out\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.849518 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t699g\" (UniqueName: \"kubernetes.io/projected/d6a51fa4-023a-423a-9afa-df77b13b69f8-kube-api-access-t699g\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.951693 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/d6a51fa4-023a-423a-9afa-df77b13b69f8-metrics-client-ca\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.951768 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6a51fa4-023a-423a-9afa-df77b13b69f8-alertmanager-trusted-ca-bundle\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.951799 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-web-config\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.951907 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d6a51fa4-023a-423a-9afa-df77b13b69f8-config-out\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.951929 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t699g\" (UniqueName: \"kubernetes.io/projected/d6a51fa4-023a-423a-9afa-df77b13b69f8-kube-api-access-t699g\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.951978 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-alertmanager-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-kube-rbac-proxy-web\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.952009 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-alertmanager-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-kube-rbac-proxy\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.952024 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"secret-alertmanager-kube-rbac-proxy-metric\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-kube-rbac-proxy-metric\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.952043 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-main-db\" (UniqueName: \"kubernetes.io/empty-dir/d6a51fa4-023a-423a-9afa-df77b13b69f8-alertmanager-main-db\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.952061 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-alertmanager-main-tls\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-main-tls\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.952105 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-config-volume\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.952121 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d6a51fa4-023a-423a-9afa-df77b13b69f8-tls-assets\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: E1203 12:27:11.952809 4849 secret.go:188] Couldn't get secret openshift-monitoring/alertmanager-main-tls: secret "alertmanager-main-tls" not found Dec 03 12:27:11 crc kubenswrapper[4849]: E1203 12:27:11.952856 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-main-tls podName:d6a51fa4-023a-423a-9afa-df77b13b69f8 nodeName:}" failed. No retries permitted until 2025-12-03 12:27:12.45284282 +0000 UTC m=+378.914690603 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "secret-alertmanager-main-tls" (UniqueName: "kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-main-tls") pod "alertmanager-main-0" (UID: "d6a51fa4-023a-423a-9afa-df77b13b69f8") : secret "alertmanager-main-tls" not found Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.952980 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/d6a51fa4-023a-423a-9afa-df77b13b69f8-metrics-client-ca\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.953526 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6a51fa4-023a-423a-9afa-df77b13b69f8-alertmanager-trusted-ca-bundle\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.954471 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-main-db\" (UniqueName: \"kubernetes.io/empty-dir/d6a51fa4-023a-423a-9afa-df77b13b69f8-alertmanager-main-db\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.956634 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-web-config\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.957109 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d6a51fa4-023a-423a-9afa-df77b13b69f8-config-out\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.957257 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d6a51fa4-023a-423a-9afa-df77b13b69f8-tls-assets\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.965240 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-kube-rbac-proxy-web\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.966290 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-kube-rbac-proxy\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.966621 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t699g\" 
(UniqueName: \"kubernetes.io/projected/d6a51fa4-023a-423a-9afa-df77b13b69f8-kube-api-access-t699g\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.972480 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-config-volume\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:11 crc kubenswrapper[4849]: I1203 12:27:11.981875 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-kube-rbac-proxy-metric\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-kube-rbac-proxy-metric\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.016531 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" event={"ID":"235021e1-0e3d-4ef4-87a7-ef1f93831c0f","Type":"ContainerStarted","Data":"b59b166d5b3340c79c1084383627463dedfe23db2103a9068a04c87b4398ec78"} Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.017448 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" event={"ID":"7551d5ce-b9eb-4a51-b2e6-337f39b52a92","Type":"ContainerStarted","Data":"72ff3fecbddb87fc25a2eb1378bc18cd7986f6d4279e08e6d72ed433d510c033"} Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.458234 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-alertmanager-main-tls\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-main-tls\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.462469 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-alertmanager-main-tls\" (UniqueName: \"kubernetes.io/secret/d6a51fa4-023a-423a-9afa-df77b13b69f8-secret-alertmanager-main-tls\") pod \"alertmanager-main-0\" (UID: \"d6a51fa4-023a-423a-9afa-df77b13b69f8\") " pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.608974 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/alertmanager-main-0" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.622879 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw"] Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.624237 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.626358 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.626950 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy-rules" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.626965 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-dockercfg-x5bsk" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.626950 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-grpc-tls-eepaiicf1nq57" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.629006 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy-web" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.629182 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-kube-rbac-proxy-metrics" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.629361 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"thanos-querier-tls" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.637182 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw"] Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.761939 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-tls\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-tls\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.762120 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-grpc-tls\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.762233 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-metrics\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-kube-rbac-proxy-metrics\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.762297 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-metrics-client-ca\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.762313 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knxzk\" (UniqueName: 
\"kubernetes.io/projected/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-kube-api-access-knxzk\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.762517 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-kube-rbac-proxy\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.762565 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-rules\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-kube-rbac-proxy-rules\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.762632 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-kube-rbac-proxy-web\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.863355 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-kube-rbac-proxy-web\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.863553 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-tls\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-tls\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.863588 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-grpc-tls\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.863623 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-metrics\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-kube-rbac-proxy-metrics\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.863666 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-metrics-client-ca\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.863680 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knxzk\" (UniqueName: \"kubernetes.io/projected/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-kube-api-access-knxzk\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.863745 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-kube-rbac-proxy\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.863781 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-thanos-querier-kube-rbac-proxy-rules\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-kube-rbac-proxy-rules\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.864536 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-metrics-client-ca\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.867514 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-grpc-tls\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.867677 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-tls\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-tls\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.868936 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-kube-rbac-proxy\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.869111 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-kube-rbac-proxy-web\") pod 
\"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.869170 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy-metrics\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-kube-rbac-proxy-metrics\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.870226 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-thanos-querier-kube-rbac-proxy-rules\" (UniqueName: \"kubernetes.io/secret/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-secret-thanos-querier-kube-rbac-proxy-rules\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.876722 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knxzk\" (UniqueName: \"kubernetes.io/projected/1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd-kube-api-access-knxzk\") pod \"thanos-querier-d8cbc9c9f-rzkbw\" (UID: \"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd\") " pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:12 crc kubenswrapper[4849]: I1203 12:27:12.938737 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:13 crc kubenswrapper[4849]: I1203 12:27:13.028555 4849 generic.go:334] "Generic (PLEG): container finished" podID="0777f8f5-ed89-42d1-9d8e-3d5d08b68abe" containerID="ce47ddc7fdf0da6c7698c3236d3ae02a0d33d8d2f43a3c19019d21e565fe510c" exitCode=0 Dec 03 12:27:13 crc kubenswrapper[4849]: I1203 12:27:13.028599 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-bqhld" event={"ID":"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe","Type":"ContainerDied","Data":"ce47ddc7fdf0da6c7698c3236d3ae02a0d33d8d2f43a3c19019d21e565fe510c"} Dec 03 12:27:13 crc kubenswrapper[4849]: I1203 12:27:13.034222 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" event={"ID":"235021e1-0e3d-4ef4-87a7-ef1f93831c0f","Type":"ContainerStarted","Data":"2504ff9111683a87ba2a254553b5f898d686de361a05f4e19e1898c192470ee6"} Dec 03 12:27:13 crc kubenswrapper[4849]: I1203 12:27:13.034282 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" event={"ID":"235021e1-0e3d-4ef4-87a7-ef1f93831c0f","Type":"ContainerStarted","Data":"570067131c02042a42286a6bff29b860d768b37c5a1b64e4123a90d376b2ed39"} Dec 03 12:27:13 crc kubenswrapper[4849]: I1203 12:27:13.036024 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" event={"ID":"7551d5ce-b9eb-4a51-b2e6-337f39b52a92","Type":"ContainerStarted","Data":"24328c1501a5724fb20cab314e4a92d95a4094b1d51835197489b65af2decddc"} Dec 03 12:27:13 crc kubenswrapper[4849]: I1203 12:27:13.036051 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" 
event={"ID":"7551d5ce-b9eb-4a51-b2e6-337f39b52a92","Type":"ContainerStarted","Data":"badb669c2839c4f4f132a06dc0ad3f74b9111bc27a75b116791913b6d24c3bcf"} Dec 03 12:27:13 crc kubenswrapper[4849]: I1203 12:27:13.131189 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/alertmanager-main-0"] Dec 03 12:27:13 crc kubenswrapper[4849]: W1203 12:27:13.137891 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6a51fa4_023a_423a_9afa_df77b13b69f8.slice/crio-3a9ba4264a8a9b8438f3e4b581c82a01a06665e3c665e76a0c84d2aaf401aa70 WatchSource:0}: Error finding container 3a9ba4264a8a9b8438f3e4b581c82a01a06665e3c665e76a0c84d2aaf401aa70: Status 404 returned error can't find the container with id 3a9ba4264a8a9b8438f3e4b581c82a01a06665e3c665e76a0c84d2aaf401aa70 Dec 03 12:27:13 crc kubenswrapper[4849]: I1203 12:27:13.316317 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw"] Dec 03 12:27:13 crc kubenswrapper[4849]: W1203 12:27:13.510611 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c5cf079_d2c7_4d52_af0c_d3e460ac6bcd.slice/crio-b1c6a94e7f0e61d7c50029976da243ae3a9664c2caa40209b8525a332d416671 WatchSource:0}: Error finding container b1c6a94e7f0e61d7c50029976da243ae3a9664c2caa40209b8525a332d416671: Status 404 returned error can't find the container with id b1c6a94e7f0e61d7c50029976da243ae3a9664c2caa40209b8525a332d416671 Dec 03 12:27:14 crc kubenswrapper[4849]: I1203 12:27:14.044722 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-bqhld" event={"ID":"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe","Type":"ContainerStarted","Data":"8886864e4cebdb65a740f6effa4cd0396ac8525447b80ec1ca013e32d6fc7fac"} Dec 03 12:27:14 crc kubenswrapper[4849]: I1203 12:27:14.045371 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/node-exporter-bqhld" event={"ID":"0777f8f5-ed89-42d1-9d8e-3d5d08b68abe","Type":"ContainerStarted","Data":"3d7a2e86f506c06c9a7a592acec310a4d3ea99a058aaa9f8d583b4d8cb855d91"} Dec 03 12:27:14 crc kubenswrapper[4849]: I1203 12:27:14.046455 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" event={"ID":"235021e1-0e3d-4ef4-87a7-ef1f93831c0f","Type":"ContainerStarted","Data":"f0ce51f7867442e5fdaa637ca30f14268813c95516f057f9a8186a55864cba86"} Dec 03 12:27:14 crc kubenswrapper[4849]: I1203 12:27:14.048398 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" event={"ID":"7551d5ce-b9eb-4a51-b2e6-337f39b52a92","Type":"ContainerStarted","Data":"2d3a2d9bb81d0c8a7503d8b5dbb005ab5874520d5dfb8a2e95159ecc1ff4bbc0"} Dec 03 12:27:14 crc kubenswrapper[4849]: I1203 12:27:14.049442 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" event={"ID":"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd","Type":"ContainerStarted","Data":"b1c6a94e7f0e61d7c50029976da243ae3a9664c2caa40209b8525a332d416671"} Dec 03 12:27:14 crc kubenswrapper[4849]: I1203 12:27:14.050244 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"d6a51fa4-023a-423a-9afa-df77b13b69f8","Type":"ContainerStarted","Data":"3a9ba4264a8a9b8438f3e4b581c82a01a06665e3c665e76a0c84d2aaf401aa70"} Dec 03 12:27:14 crc 
kubenswrapper[4849]: I1203 12:27:14.057994 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/node-exporter-bqhld" podStartSLOduration=2.849090535 podStartE2EDuration="4.057984465s" podCreationTimestamp="2025-12-03 12:27:10 +0000 UTC" firstStartedPulling="2025-12-03 12:27:10.908107217 +0000 UTC m=+377.369955000" lastFinishedPulling="2025-12-03 12:27:12.117001157 +0000 UTC m=+378.578848930" observedRunningTime="2025-12-03 12:27:14.055135071 +0000 UTC m=+380.516982854" watchObservedRunningTime="2025-12-03 12:27:14.057984465 +0000 UTC m=+380.519832247" Dec 03 12:27:14 crc kubenswrapper[4849]: I1203 12:27:14.068102 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/kube-state-metrics-777cb5bd5d-n7hbn" podStartSLOduration=2.516237694 podStartE2EDuration="4.068088113s" podCreationTimestamp="2025-12-03 12:27:10 +0000 UTC" firstStartedPulling="2025-12-03 12:27:11.231734096 +0000 UTC m=+377.693581880" lastFinishedPulling="2025-12-03 12:27:12.783584516 +0000 UTC m=+379.245432299" observedRunningTime="2025-12-03 12:27:14.066543977 +0000 UTC m=+380.528391760" watchObservedRunningTime="2025-12-03 12:27:14.068088113 +0000 UTC m=+380.529935896" Dec 03 12:27:14 crc kubenswrapper[4849]: I1203 12:27:14.078557 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/openshift-state-metrics-566fddb674-526x6" podStartSLOduration=2.743992226 podStartE2EDuration="4.078547327s" podCreationTimestamp="2025-12-03 12:27:10 +0000 UTC" firstStartedPulling="2025-12-03 12:27:12.191324191 +0000 UTC m=+378.653171973" lastFinishedPulling="2025-12-03 12:27:13.525879292 +0000 UTC m=+379.987727074" observedRunningTime="2025-12-03 12:27:14.077843347 +0000 UTC m=+380.539691130" watchObservedRunningTime="2025-12-03 12:27:14.078547327 +0000 UTC m=+380.540395110" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.056638 4849 generic.go:334] "Generic (PLEG): container finished" podID="d6a51fa4-023a-423a-9afa-df77b13b69f8" containerID="3148801185c0453f0813f157fa2a4a3f85c13f2790f62065639b55018ac2e60d" exitCode=0 Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.057834 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"d6a51fa4-023a-423a-9afa-df77b13b69f8","Type":"ContainerDied","Data":"3148801185c0453f0813f157fa2a4a3f85c13f2790f62065639b55018ac2e60d"} Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.336807 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7dcb9bcd7d-whztg"] Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.337441 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.347050 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7dcb9bcd7d-whztg"] Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.497855 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-service-ca\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.497939 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9abd75da-eefb-44ec-a9e0-53459780442c-console-oauth-config\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.497959 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-oauth-serving-cert\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.497982 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-trusted-ca-bundle\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.497997 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9bx5\" (UniqueName: \"kubernetes.io/projected/9abd75da-eefb-44ec-a9e0-53459780442c-kube-api-access-b9bx5\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.498064 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9abd75da-eefb-44ec-a9e0-53459780442c-console-serving-cert\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.498080 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-console-config\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.599378 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-console-config\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc 
kubenswrapper[4849]: I1203 12:27:15.599455 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-service-ca\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.599510 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9abd75da-eefb-44ec-a9e0-53459780442c-console-oauth-config\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.599527 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-oauth-serving-cert\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.599548 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-trusted-ca-bundle\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.599573 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9bx5\" (UniqueName: \"kubernetes.io/projected/9abd75da-eefb-44ec-a9e0-53459780442c-kube-api-access-b9bx5\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.599634 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9abd75da-eefb-44ec-a9e0-53459780442c-console-serving-cert\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.600926 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-console-config\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.601080 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-service-ca\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.601139 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-oauth-serving-cert\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 
12:27:15.601155 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-trusted-ca-bundle\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.603843 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9abd75da-eefb-44ec-a9e0-53459780442c-console-serving-cert\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.604207 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9abd75da-eefb-44ec-a9e0-53459780442c-console-oauth-config\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.614628 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9bx5\" (UniqueName: \"kubernetes.io/projected/9abd75da-eefb-44ec-a9e0-53459780442c-kube-api-access-b9bx5\") pod \"console-7dcb9bcd7d-whztg\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.648334 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.935583 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/metrics-server-74b988cdf6-hhs6s"] Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.938539 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.943704 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/metrics-server-74b988cdf6-hhs6s"] Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.943965 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-client-certs" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.945280 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"metrics-server-audit-profiles" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.945723 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"kubelet-serving-ca-bundle" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.945948 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-server-tls" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.946141 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-server-dockercfg-hvfrh" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.946508 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"metrics-server-bhdotb5rr342t" Dec 03 12:27:15 crc kubenswrapper[4849]: I1203 12:27:15.979107 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7dcb9bcd7d-whztg"] Dec 03 12:27:15 crc kubenswrapper[4849]: W1203 12:27:15.985845 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9abd75da_eefb_44ec_a9e0_53459780442c.slice/crio-f0b676158296e4f1679d8b87cf74b678e3f96c1099f179792ba9557b044ccac3 WatchSource:0}: Error finding container f0b676158296e4f1679d8b87cf74b678e3f96c1099f179792ba9557b044ccac3: Status 404 returned error can't find the container with id f0b676158296e4f1679d8b87cf74b678e3f96c1099f179792ba9557b044ccac3 Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.065500 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" event={"ID":"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd","Type":"ContainerStarted","Data":"78ceb43bed0da10040a2d44bbec1a9657e259b2914381e1d694936ae73b22d00"} Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.065538 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" event={"ID":"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd","Type":"ContainerStarted","Data":"0043476c1e0b40042f749727d73de0ab9b84f37ea5dcb37d62cff8c4cc2ce5f4"} Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.065582 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" event={"ID":"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd","Type":"ContainerStarted","Data":"456142223c0be79e4fcaa32a6d35a29bec49ab06d66febaa06c161d469fd00fb"} Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.066689 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7dcb9bcd7d-whztg" event={"ID":"9abd75da-eefb-44ec-a9e0-53459780442c","Type":"ContainerStarted","Data":"f0b676158296e4f1679d8b87cf74b678e3f96c1099f179792ba9557b044ccac3"} Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.105107 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"secret-metrics-server-tls\" (UniqueName: \"kubernetes.io/secret/32477d8e-7cc5-4387-a07d-5707f4e85db0-secret-metrics-server-tls\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.105254 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/32477d8e-7cc5-4387-a07d-5707f4e85db0-secret-metrics-client-certs\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.105305 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-server-audit-profiles\" (UniqueName: \"kubernetes.io/configmap/32477d8e-7cc5-4387-a07d-5707f4e85db0-metrics-server-audit-profiles\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.105326 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32477d8e-7cc5-4387-a07d-5707f4e85db0-client-ca-bundle\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.105352 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kc8z\" (UniqueName: \"kubernetes.io/projected/32477d8e-7cc5-4387-a07d-5707f4e85db0-kube-api-access-5kc8z\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.106659 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/32477d8e-7cc5-4387-a07d-5707f4e85db0-configmap-kubelet-serving-ca-bundle\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.106733 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-log\" (UniqueName: \"kubernetes.io/empty-dir/32477d8e-7cc5-4387-a07d-5707f4e85db0-audit-log\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.207636 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/32477d8e-7cc5-4387-a07d-5707f4e85db0-secret-metrics-client-certs\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.207694 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-server-audit-profiles\" (UniqueName: 
\"kubernetes.io/configmap/32477d8e-7cc5-4387-a07d-5707f4e85db0-metrics-server-audit-profiles\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.207713 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32477d8e-7cc5-4387-a07d-5707f4e85db0-client-ca-bundle\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.207732 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kc8z\" (UniqueName: \"kubernetes.io/projected/32477d8e-7cc5-4387-a07d-5707f4e85db0-kube-api-access-5kc8z\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.207790 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/32477d8e-7cc5-4387-a07d-5707f4e85db0-configmap-kubelet-serving-ca-bundle\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.207830 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-log\" (UniqueName: \"kubernetes.io/empty-dir/32477d8e-7cc5-4387-a07d-5707f4e85db0-audit-log\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.207857 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-metrics-server-tls\" (UniqueName: \"kubernetes.io/secret/32477d8e-7cc5-4387-a07d-5707f4e85db0-secret-metrics-server-tls\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.208360 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-log\" (UniqueName: \"kubernetes.io/empty-dir/32477d8e-7cc5-4387-a07d-5707f4e85db0-audit-log\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.208522 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/32477d8e-7cc5-4387-a07d-5707f4e85db0-configmap-kubelet-serving-ca-bundle\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.208767 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-server-audit-profiles\" (UniqueName: \"kubernetes.io/configmap/32477d8e-7cc5-4387-a07d-5707f4e85db0-metrics-server-audit-profiles\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " 
pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.212041 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-metrics-server-tls\" (UniqueName: \"kubernetes.io/secret/32477d8e-7cc5-4387-a07d-5707f4e85db0-secret-metrics-server-tls\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.212151 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32477d8e-7cc5-4387-a07d-5707f4e85db0-client-ca-bundle\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.214954 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/32477d8e-7cc5-4387-a07d-5707f4e85db0-secret-metrics-client-certs\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.220679 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kc8z\" (UniqueName: \"kubernetes.io/projected/32477d8e-7cc5-4387-a07d-5707f4e85db0-kube-api-access-5kc8z\") pod \"metrics-server-74b988cdf6-hhs6s\" (UID: \"32477d8e-7cc5-4387-a07d-5707f4e85db0\") " pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.256358 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.333349 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/monitoring-plugin-7c7f599858-kkshk"] Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.335302 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/monitoring-plugin-7c7f599858-kkshk" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.336214 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/monitoring-plugin-7c7f599858-kkshk"] Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.337018 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"default-dockercfg-6tstp" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.337387 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"monitoring-plugin-cert" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.511376 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"monitoring-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5b9f23b5-06da-4c4b-8b8d-7bfa8a9018ba-monitoring-plugin-cert\") pod \"monitoring-plugin-7c7f599858-kkshk\" (UID: \"5b9f23b5-06da-4c4b-8b8d-7bfa8a9018ba\") " pod="openshift-monitoring/monitoring-plugin-7c7f599858-kkshk" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.612604 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/metrics-server-74b988cdf6-hhs6s"] Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.613246 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"monitoring-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5b9f23b5-06da-4c4b-8b8d-7bfa8a9018ba-monitoring-plugin-cert\") pod \"monitoring-plugin-7c7f599858-kkshk\" (UID: \"5b9f23b5-06da-4c4b-8b8d-7bfa8a9018ba\") " pod="openshift-monitoring/monitoring-plugin-7c7f599858-kkshk" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.621927 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"monitoring-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5b9f23b5-06da-4c4b-8b8d-7bfa8a9018ba-monitoring-plugin-cert\") pod \"monitoring-plugin-7c7f599858-kkshk\" (UID: \"5b9f23b5-06da-4c4b-8b8d-7bfa8a9018ba\") " pod="openshift-monitoring/monitoring-plugin-7c7f599858-kkshk" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.651953 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/monitoring-plugin-7c7f599858-kkshk" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.712801 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-monitoring/prometheus-k8s-0"] Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.715834 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.718789 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"kube-rbac-proxy" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.718904 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-thanos-sidecar-tls" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.718994 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"serving-certs-ca-bundle" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.719155 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-web-config" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.719240 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-kube-rbac-proxy-web" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.719357 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-thanos-prometheus-http-client-file" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.719437 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.719584 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-tls-assets-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.720027 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-dockercfg-htvzb" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.720053 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-grpc-tls-ftq5o8efrts0c" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.720205 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-monitoring"/"prometheus-k8s-tls" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.724264 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"prometheus-k8s-rulefiles-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.727133 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-k8s-0"] Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.731470 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-monitoring"/"prometheus-trusted-ca-bundle" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918176 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f4598e2f-d0cd-4eff-9082-09bde218d043-tls-assets\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918233 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-prometheus-k8s-thanos-sidecar-tls\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-prometheus-k8s-thanos-sidecar-tls\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918253 4849 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-thanos-prometheus-http-client-file\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918394 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-k8s-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-prometheus-k8s-rulefiles-0\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918456 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-configmap-serving-certs-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918494 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-configmap-metrics-client-ca\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918543 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-prometheus-trusted-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918626 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-k8s-db\" (UniqueName: \"kubernetes.io/empty-dir/f4598e2f-d0cd-4eff-9082-09bde218d043-prometheus-k8s-db\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918696 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f4598e2f-d0cd-4eff-9082-09bde218d043-config-out\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918710 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-grpc-tls\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918772 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-config\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " 
pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918807 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-prometheus-k8s-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-prometheus-k8s-kube-rbac-proxy-web\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918866 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-prometheus-k8s-tls\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-prometheus-k8s-tls\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918884 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-configmap-kubelet-serving-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918904 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv9f6\" (UniqueName: \"kubernetes.io/projected/f4598e2f-d0cd-4eff-9082-09bde218d043-kube-api-access-nv9f6\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.918937 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-kube-rbac-proxy\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.919016 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-metrics-client-certs\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:16 crc kubenswrapper[4849]: I1203 12:27:16.919054 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-web-config\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.019592 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-prometheus-trusted-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.019652 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-k8s-db\" (UniqueName: 
\"kubernetes.io/empty-dir/f4598e2f-d0cd-4eff-9082-09bde218d043-prometheus-k8s-db\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.019679 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f4598e2f-d0cd-4eff-9082-09bde218d043-config-out\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.019693 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-grpc-tls\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.019711 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-config\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.019728 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-prometheus-k8s-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-prometheus-k8s-kube-rbac-proxy-web\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.019750 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-prometheus-k8s-tls\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-prometheus-k8s-tls\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.019765 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-configmap-kubelet-serving-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.019866 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv9f6\" (UniqueName: \"kubernetes.io/projected/f4598e2f-d0cd-4eff-9082-09bde218d043-kube-api-access-nv9f6\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.019886 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-kube-rbac-proxy\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-kube-rbac-proxy\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.019908 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-metrics-client-certs\" (UniqueName: 
\"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-metrics-client-certs\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.019928 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-web-config\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.020245 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f4598e2f-d0cd-4eff-9082-09bde218d043-tls-assets\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.020269 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-thanos-prometheus-http-client-file\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.020284 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-prometheus-k8s-thanos-sidecar-tls\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-prometheus-k8s-thanos-sidecar-tls\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.020302 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-k8s-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-prometheus-k8s-rulefiles-0\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.020322 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-configmap-serving-certs-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.020340 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-configmap-metrics-client-ca\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.020728 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-kubelet-serving-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-configmap-kubelet-serving-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.020873 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"prometheus-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-prometheus-trusted-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.021091 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-metrics-client-ca\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-configmap-metrics-client-ca\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.021155 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-k8s-db\" (UniqueName: \"kubernetes.io/empty-dir/f4598e2f-d0cd-4eff-9082-09bde218d043-prometheus-k8s-db\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.021499 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-configmap-serving-certs-ca-bundle\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.023260 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-web-config\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.023455 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-prometheus-k8s-tls\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-prometheus-k8s-tls\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.023487 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f4598e2f-d0cd-4eff-9082-09bde218d043-config-out\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.023613 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-metrics-client-certs\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-metrics-client-certs\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.024110 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-grpc-tls\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-grpc-tls\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.024209 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-kube-rbac-proxy\" (UniqueName: 
\"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-kube-rbac-proxy\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.024417 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-config\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.025049 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-thanos-prometheus-http-client-file\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.025369 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-prometheus-k8s-thanos-sidecar-tls\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-prometheus-k8s-thanos-sidecar-tls\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.025104 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f4598e2f-d0cd-4eff-9082-09bde218d043-tls-assets\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.027915 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-prometheus-k8s-kube-rbac-proxy-web\" (UniqueName: \"kubernetes.io/secret/f4598e2f-d0cd-4eff-9082-09bde218d043-secret-prometheus-k8s-kube-rbac-proxy-web\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.029766 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-k8s-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f4598e2f-d0cd-4eff-9082-09bde218d043-prometheus-k8s-rulefiles-0\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.032953 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv9f6\" (UniqueName: \"kubernetes.io/projected/f4598e2f-d0cd-4eff-9082-09bde218d043-kube-api-access-nv9f6\") pod \"prometheus-k8s-0\" (UID: \"f4598e2f-d0cd-4eff-9082-09bde218d043\") " pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.073010 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" event={"ID":"32477d8e-7cc5-4387-a07d-5707f4e85db0","Type":"ContainerStarted","Data":"155618e88aec739a4c3e231748c2451b0495dd068774159d665efd087b734fef"} Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.075861 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7dcb9bcd7d-whztg" 
event={"ID":"9abd75da-eefb-44ec-a9e0-53459780442c","Type":"ContainerStarted","Data":"1495809c4bd91446951a34356ecc241fe37a63487db1be32c22d957328501b4b"} Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.089679 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7dcb9bcd7d-whztg" podStartSLOduration=2.089663308 podStartE2EDuration="2.089663308s" podCreationTimestamp="2025-12-03 12:27:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:27:17.089446261 +0000 UTC m=+383.551294044" watchObservedRunningTime="2025-12-03 12:27:17.089663308 +0000 UTC m=+383.551511091" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.332672 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.559515 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/monitoring-plugin-7c7f599858-kkshk"] Dec 03 12:27:17 crc kubenswrapper[4849]: W1203 12:27:17.565877 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b9f23b5_06da_4c4b_8b8d_7bfa8a9018ba.slice/crio-0c3c5ad5382bc68e59fe8e540691ee0806a4d6d4ba7f6ca98e38d3102a43afdb WatchSource:0}: Error finding container 0c3c5ad5382bc68e59fe8e540691ee0806a4d6d4ba7f6ca98e38d3102a43afdb: Status 404 returned error can't find the container with id 0c3c5ad5382bc68e59fe8e540691ee0806a4d6d4ba7f6ca98e38d3102a43afdb Dec 03 12:27:17 crc kubenswrapper[4849]: I1203 12:27:17.684917 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-monitoring/prometheus-k8s-0"] Dec 03 12:27:17 crc kubenswrapper[4849]: W1203 12:27:17.692179 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4598e2f_d0cd_4eff_9082_09bde218d043.slice/crio-f3b3b3e5e7fb471ca6d6e120d293e4152bf50bb944951f84d7fc2576e13bab69 WatchSource:0}: Error finding container f3b3b3e5e7fb471ca6d6e120d293e4152bf50bb944951f84d7fc2576e13bab69: Status 404 returned error can't find the container with id f3b3b3e5e7fb471ca6d6e120d293e4152bf50bb944951f84d7fc2576e13bab69 Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.082450 4849 generic.go:334] "Generic (PLEG): container finished" podID="f4598e2f-d0cd-4eff-9082-09bde218d043" containerID="0eca5d9aa8eb408e72515cf410c1ffaff059f4b74dd5bbb5c699b0aa29b0eb16" exitCode=0 Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.082554 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"f4598e2f-d0cd-4eff-9082-09bde218d043","Type":"ContainerDied","Data":"0eca5d9aa8eb408e72515cf410c1ffaff059f4b74dd5bbb5c699b0aa29b0eb16"} Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.082744 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"f4598e2f-d0cd-4eff-9082-09bde218d043","Type":"ContainerStarted","Data":"f3b3b3e5e7fb471ca6d6e120d293e4152bf50bb944951f84d7fc2576e13bab69"} Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.084880 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/monitoring-plugin-7c7f599858-kkshk" event={"ID":"5b9f23b5-06da-4c4b-8b8d-7bfa8a9018ba","Type":"ContainerStarted","Data":"0c3c5ad5382bc68e59fe8e540691ee0806a4d6d4ba7f6ca98e38d3102a43afdb"} Dec 03 
12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.094625 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" event={"ID":"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd","Type":"ContainerStarted","Data":"f6853e45147da5dfd385ff3d16aa0eff61c87826c25da3db46cbd50f6ab344f7"} Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.094681 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" event={"ID":"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd","Type":"ContainerStarted","Data":"3d0905ab1ca512c5f4ebee914ab1f151dcbfa21400f4995fcfb40fd6b280da2f"} Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.094693 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" event={"ID":"1c5cf079-d2c7-4d52-af0c-d3e460ac6bcd","Type":"ContainerStarted","Data":"dbeed7742db6364b1c0102d4058c398685ba9633a92dd415eaf0e3bbcdf415a1"} Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.095280 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.098040 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"d6a51fa4-023a-423a-9afa-df77b13b69f8","Type":"ContainerStarted","Data":"0b05aba0c46b3b2fdca4ccf34a51ab12be0c016a36f7fd223cbc7118d55a363e"} Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.098087 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"d6a51fa4-023a-423a-9afa-df77b13b69f8","Type":"ContainerStarted","Data":"7e662f7ccc08603b1ba529c3962dd60cb1c66c7016e70d1cbe111ea7400dc085"} Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.098098 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"d6a51fa4-023a-423a-9afa-df77b13b69f8","Type":"ContainerStarted","Data":"9b9b6b002fe1f98aebafeda8456360bbeeaaab4f03b8594c32f04b5eab1e3278"} Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.098106 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"d6a51fa4-023a-423a-9afa-df77b13b69f8","Type":"ContainerStarted","Data":"bd49bcff73dda2e797072b83970a265bbb2b20ee70c3ac6d03ea1a113594d254"} Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.098117 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"d6a51fa4-023a-423a-9afa-df77b13b69f8","Type":"ContainerStarted","Data":"3ae9654d0b7c57e628d6ca6ef22427f534e153f2125202c85ca361c2f8b55e97"} Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.098125 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/alertmanager-main-0" event={"ID":"d6a51fa4-023a-423a-9afa-df77b13b69f8","Type":"ContainerStarted","Data":"4281350bdd774f719bceeb3c1b932abd44bcc64dd5798df191ac91136375c003"} Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.119320 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" podStartSLOduration=2.41814604 podStartE2EDuration="6.119307359s" podCreationTimestamp="2025-12-03 12:27:12 +0000 UTC" firstStartedPulling="2025-12-03 12:27:13.521140535 +0000 UTC m=+379.982988318" lastFinishedPulling="2025-12-03 12:27:17.222301854 +0000 UTC m=+383.684149637" 
observedRunningTime="2025-12-03 12:27:18.118334855 +0000 UTC m=+384.580182638" watchObservedRunningTime="2025-12-03 12:27:18.119307359 +0000 UTC m=+384.581155142" Dec 03 12:27:18 crc kubenswrapper[4849]: I1203 12:27:18.148699 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/alertmanager-main-0" podStartSLOduration=3.066076561 podStartE2EDuration="7.14868389s" podCreationTimestamp="2025-12-03 12:27:11 +0000 UTC" firstStartedPulling="2025-12-03 12:27:13.139460435 +0000 UTC m=+379.601308218" lastFinishedPulling="2025-12-03 12:27:17.222067765 +0000 UTC m=+383.683915547" observedRunningTime="2025-12-03 12:27:18.138715476 +0000 UTC m=+384.600563279" watchObservedRunningTime="2025-12-03 12:27:18.14868389 +0000 UTC m=+384.610531673" Dec 03 12:27:19 crc kubenswrapper[4849]: I1203 12:27:19.104433 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" event={"ID":"32477d8e-7cc5-4387-a07d-5707f4e85db0","Type":"ContainerStarted","Data":"2a20838e9594c1476a4a8edbc09caaa32b3e4dc69f572deaf513486e8cc4137f"} Dec 03 12:27:19 crc kubenswrapper[4849]: I1203 12:27:19.116179 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" podStartSLOduration=2.7876007449999998 podStartE2EDuration="4.116162588s" podCreationTimestamp="2025-12-03 12:27:15 +0000 UTC" firstStartedPulling="2025-12-03 12:27:17.197400514 +0000 UTC m=+383.659262794" lastFinishedPulling="2025-12-03 12:27:18.525976854 +0000 UTC m=+384.987824637" observedRunningTime="2025-12-03 12:27:19.11506533 +0000 UTC m=+385.576913113" watchObservedRunningTime="2025-12-03 12:27:19.116162588 +0000 UTC m=+385.578010371" Dec 03 12:27:20 crc kubenswrapper[4849]: I1203 12:27:20.113882 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/monitoring-plugin-7c7f599858-kkshk" event={"ID":"5b9f23b5-06da-4c4b-8b8d-7bfa8a9018ba","Type":"ContainerStarted","Data":"2de7a596f2b05f537dafe275044bf524fd7577ad437227f414a698cf4b5ba547"} Dec 03 12:27:20 crc kubenswrapper[4849]: I1203 12:27:20.114847 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/monitoring-plugin-7c7f599858-kkshk" Dec 03 12:27:20 crc kubenswrapper[4849]: I1203 12:27:20.119216 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/monitoring-plugin-7c7f599858-kkshk" Dec 03 12:27:20 crc kubenswrapper[4849]: I1203 12:27:20.125081 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/thanos-querier-d8cbc9c9f-rzkbw" Dec 03 12:27:20 crc kubenswrapper[4849]: I1203 12:27:20.128510 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/monitoring-plugin-7c7f599858-kkshk" podStartSLOduration=2.6891643 podStartE2EDuration="4.128494693s" podCreationTimestamp="2025-12-03 12:27:16 +0000 UTC" firstStartedPulling="2025-12-03 12:27:17.572433029 +0000 UTC m=+384.034280811" lastFinishedPulling="2025-12-03 12:27:19.011763422 +0000 UTC m=+385.473611204" observedRunningTime="2025-12-03 12:27:20.123204331 +0000 UTC m=+386.585052114" watchObservedRunningTime="2025-12-03 12:27:20.128494693 +0000 UTC m=+386.590342476" Dec 03 12:27:20 crc kubenswrapper[4849]: I1203 12:27:20.226556 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" 
podUID="c24803cd-8c4d-4ff4-9b85-441f955bd3f3" containerName="registry" containerID="cri-o://23a0eb6ca1343e2c30ad6eceadee12b13db4d77a0d9b8d849770be3690e9abc7" gracePeriod=30 Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.117613 4849 generic.go:334] "Generic (PLEG): container finished" podID="c24803cd-8c4d-4ff4-9b85-441f955bd3f3" containerID="23a0eb6ca1343e2c30ad6eceadee12b13db4d77a0d9b8d849770be3690e9abc7" exitCode=0 Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.117695 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" event={"ID":"c24803cd-8c4d-4ff4-9b85-441f955bd3f3","Type":"ContainerDied","Data":"23a0eb6ca1343e2c30ad6eceadee12b13db4d77a0d9b8d849770be3690e9abc7"} Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.396654 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.573843 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-installation-pull-secrets\") pod \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.573882 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-registry-tls\") pod \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.573993 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.574023 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-registry-certificates\") pod \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.574079 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-ca-trust-extracted\") pod \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.574161 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-trusted-ca\") pod \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.574193 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-bound-sa-token\") pod \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.574213 
4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zxz5\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-kube-api-access-4zxz5\") pod \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\" (UID: \"c24803cd-8c4d-4ff4-9b85-441f955bd3f3\") " Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.574736 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "c24803cd-8c4d-4ff4-9b85-441f955bd3f3" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.574770 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "c24803cd-8c4d-4ff4-9b85-441f955bd3f3" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.579952 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "c24803cd-8c4d-4ff4-9b85-441f955bd3f3" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.579964 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "c24803cd-8c4d-4ff4-9b85-441f955bd3f3" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.580156 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-kube-api-access-4zxz5" (OuterVolumeSpecName: "kube-api-access-4zxz5") pod "c24803cd-8c4d-4ff4-9b85-441f955bd3f3" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3"). InnerVolumeSpecName "kube-api-access-4zxz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.580408 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "c24803cd-8c4d-4ff4-9b85-441f955bd3f3" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.582444 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "c24803cd-8c4d-4ff4-9b85-441f955bd3f3" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.587905 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "c24803cd-8c4d-4ff4-9b85-441f955bd3f3" (UID: "c24803cd-8c4d-4ff4-9b85-441f955bd3f3"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.675848 4849 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.676130 4849 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.676187 4849 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.676235 4849 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.676317 4849 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.676362 4849 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:21 crc kubenswrapper[4849]: I1203 12:27:21.676441 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zxz5\" (UniqueName: \"kubernetes.io/projected/c24803cd-8c4d-4ff4-9b85-441f955bd3f3-kube-api-access-4zxz5\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.123693 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.123697 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8lbfv" event={"ID":"c24803cd-8c4d-4ff4-9b85-441f955bd3f3","Type":"ContainerDied","Data":"46ecc0526d96760062231cb32c86e3dbb33bff61462b577dc162d196617883d6"} Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.123738 4849 scope.go:117] "RemoveContainer" containerID="23a0eb6ca1343e2c30ad6eceadee12b13db4d77a0d9b8d849770be3690e9abc7" Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.129010 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"f4598e2f-d0cd-4eff-9082-09bde218d043","Type":"ContainerStarted","Data":"5f42964ad82bc94df1411b50fd143318ab39a3c0912a84d68f9be0feb07984cb"} Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.129041 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"f4598e2f-d0cd-4eff-9082-09bde218d043","Type":"ContainerStarted","Data":"b1186af07aadba99d5ff3c388a1260d583389c80a6087c8aec6b0b029c9a98a0"} Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.129051 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"f4598e2f-d0cd-4eff-9082-09bde218d043","Type":"ContainerStarted","Data":"c8ecde8bbb416e78cfbe28d6ebcec8d451d090b8ac36345f19e4709d31aafab3"} Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.129060 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"f4598e2f-d0cd-4eff-9082-09bde218d043","Type":"ContainerStarted","Data":"2bb1d7cfff44958dd7395228ee4ed522d71b1be34056ff928dacf5c60014aa51"} Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.129071 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"f4598e2f-d0cd-4eff-9082-09bde218d043","Type":"ContainerStarted","Data":"5c2572d6fda93b7d121cdf0fc678ef281f38d9dc77983f98a655a11c49b788c2"} Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.129078 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-monitoring/prometheus-k8s-0" event={"ID":"f4598e2f-d0cd-4eff-9082-09bde218d043","Type":"ContainerStarted","Data":"e8ad33707125fe4ebf28872fe4983e5dc1918a680a5437d573d80f7dc40eb705"} Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.142691 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8lbfv"] Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.147126 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8lbfv"] Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.160414 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-monitoring/prometheus-k8s-0" podStartSLOduration=3.03342698 podStartE2EDuration="6.160402578s" podCreationTimestamp="2025-12-03 12:27:16 +0000 UTC" firstStartedPulling="2025-12-03 12:27:18.084032695 +0000 UTC m=+384.545880479" lastFinishedPulling="2025-12-03 12:27:21.211008294 +0000 UTC m=+387.672856077" observedRunningTime="2025-12-03 12:27:22.156894078 +0000 UTC m=+388.618741861" watchObservedRunningTime="2025-12-03 12:27:22.160402578 +0000 UTC m=+388.622250361" Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.333114 4849 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.677360 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.677423 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.677506 4849 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.678137 4849 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"17ec90961f5f8754905eb5b0226119e23c94a47d88996ec1a2c526a0bafcd1b4"} pod="openshift-machine-config-operator/machine-config-daemon-hszbg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:27:22 crc kubenswrapper[4849]: I1203 12:27:22.678195 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" containerID="cri-o://17ec90961f5f8754905eb5b0226119e23c94a47d88996ec1a2c526a0bafcd1b4" gracePeriod=600 Dec 03 12:27:23 crc kubenswrapper[4849]: I1203 12:27:23.135054 4849 generic.go:334] "Generic (PLEG): container finished" podID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerID="17ec90961f5f8754905eb5b0226119e23c94a47d88996ec1a2c526a0bafcd1b4" exitCode=0 Dec 03 12:27:23 crc kubenswrapper[4849]: I1203 12:27:23.135084 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerDied","Data":"17ec90961f5f8754905eb5b0226119e23c94a47d88996ec1a2c526a0bafcd1b4"} Dec 03 12:27:23 crc kubenswrapper[4849]: I1203 12:27:23.135257 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerStarted","Data":"9a18b9deb4424c2bff6208213fb1170494c2f36474a4b5d0eada48c5afabce44"} Dec 03 12:27:23 crc kubenswrapper[4849]: I1203 12:27:23.135274 4849 scope.go:117] "RemoveContainer" containerID="79469ff94f49788b33c708456664d6e399b6c8369a2fb4182022a47667a478be" Dec 03 12:27:23 crc kubenswrapper[4849]: I1203 12:27:23.862248 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c24803cd-8c4d-4ff4-9b85-441f955bd3f3" path="/var/lib/kubelet/pods/c24803cd-8c4d-4ff4-9b85-441f955bd3f3/volumes" Dec 03 12:27:25 crc kubenswrapper[4849]: I1203 12:27:25.649502 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:25 crc kubenswrapper[4849]: I1203 12:27:25.649722 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:25 crc kubenswrapper[4849]: I1203 12:27:25.653150 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:26 crc kubenswrapper[4849]: I1203 12:27:26.155338 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:27:26 crc kubenswrapper[4849]: I1203 12:27:26.189171 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-z6qrp"] Dec 03 12:27:36 crc kubenswrapper[4849]: I1203 12:27:36.256780 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:36 crc kubenswrapper[4849]: I1203 12:27:36.257083 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.216443 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-z6qrp" podUID="3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" containerName="console" containerID="cri-o://7ff5092b41a6fed760fd0b5973ea5034dd1ba38d0f8735b4d07977b29deb9e82" gracePeriod=15 Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.518876 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-z6qrp_3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152/console/0.log" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.518937 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.560913 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-config\") pod \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.560951 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-oauth-config\") pod \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.561011 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-oauth-serving-cert\") pod \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.561033 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-service-ca\") pod \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.561088 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-serving-cert\") pod \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " Dec 03 12:27:51 
crc kubenswrapper[4849]: I1203 12:27:51.561111 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wr4f\" (UniqueName: \"kubernetes.io/projected/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-kube-api-access-7wr4f\") pod \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.561138 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-trusted-ca-bundle\") pod \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\" (UID: \"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152\") " Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.561670 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" (UID: "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.561848 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-config" (OuterVolumeSpecName: "console-config") pod "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" (UID: "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.561905 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-service-ca" (OuterVolumeSpecName: "service-ca") pod "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" (UID: "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.562011 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" (UID: "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.565985 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" (UID: "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.566363 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" (UID: "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.566595 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-kube-api-access-7wr4f" (OuterVolumeSpecName: "kube-api-access-7wr4f") pod "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" (UID: "3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152"). InnerVolumeSpecName "kube-api-access-7wr4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.662926 4849 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.662955 4849 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.662964 4849 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.662972 4849 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.662979 4849 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.662988 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wr4f\" (UniqueName: \"kubernetes.io/projected/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-kube-api-access-7wr4f\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:51 crc kubenswrapper[4849]: I1203 12:27:51.662998 4849 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:27:52 crc kubenswrapper[4849]: I1203 12:27:52.257334 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-z6qrp_3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152/console/0.log" Dec 03 12:27:52 crc kubenswrapper[4849]: I1203 12:27:52.257541 4849 generic.go:334] "Generic (PLEG): container finished" podID="3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" containerID="7ff5092b41a6fed760fd0b5973ea5034dd1ba38d0f8735b4d07977b29deb9e82" exitCode=2 Dec 03 12:27:52 crc kubenswrapper[4849]: I1203 12:27:52.257568 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z6qrp" event={"ID":"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152","Type":"ContainerDied","Data":"7ff5092b41a6fed760fd0b5973ea5034dd1ba38d0f8735b4d07977b29deb9e82"} Dec 03 12:27:52 crc kubenswrapper[4849]: I1203 12:27:52.257588 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z6qrp" event={"ID":"3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152","Type":"ContainerDied","Data":"46116c7eb5793645b8e07325db5f0f70f594899c6df00023f51d28ae96d284be"} Dec 03 12:27:52 
crc kubenswrapper[4849]: I1203 12:27:52.257590 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-z6qrp" Dec 03 12:27:52 crc kubenswrapper[4849]: I1203 12:27:52.257602 4849 scope.go:117] "RemoveContainer" containerID="7ff5092b41a6fed760fd0b5973ea5034dd1ba38d0f8735b4d07977b29deb9e82" Dec 03 12:27:52 crc kubenswrapper[4849]: I1203 12:27:52.272293 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-z6qrp"] Dec 03 12:27:52 crc kubenswrapper[4849]: I1203 12:27:52.272378 4849 scope.go:117] "RemoveContainer" containerID="7ff5092b41a6fed760fd0b5973ea5034dd1ba38d0f8735b4d07977b29deb9e82" Dec 03 12:27:52 crc kubenswrapper[4849]: E1203 12:27:52.273300 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ff5092b41a6fed760fd0b5973ea5034dd1ba38d0f8735b4d07977b29deb9e82\": container with ID starting with 7ff5092b41a6fed760fd0b5973ea5034dd1ba38d0f8735b4d07977b29deb9e82 not found: ID does not exist" containerID="7ff5092b41a6fed760fd0b5973ea5034dd1ba38d0f8735b4d07977b29deb9e82" Dec 03 12:27:52 crc kubenswrapper[4849]: I1203 12:27:52.273332 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ff5092b41a6fed760fd0b5973ea5034dd1ba38d0f8735b4d07977b29deb9e82"} err="failed to get container status \"7ff5092b41a6fed760fd0b5973ea5034dd1ba38d0f8735b4d07977b29deb9e82\": rpc error: code = NotFound desc = could not find container \"7ff5092b41a6fed760fd0b5973ea5034dd1ba38d0f8735b4d07977b29deb9e82\": container with ID starting with 7ff5092b41a6fed760fd0b5973ea5034dd1ba38d0f8735b4d07977b29deb9e82 not found: ID does not exist" Dec 03 12:27:52 crc kubenswrapper[4849]: I1203 12:27:52.273807 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-z6qrp"] Dec 03 12:27:53 crc kubenswrapper[4849]: I1203 12:27:53.861334 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" path="/var/lib/kubelet/pods/3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152/volumes" Dec 03 12:27:56 crc kubenswrapper[4849]: I1203 12:27:56.262701 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:27:56 crc kubenswrapper[4849]: I1203 12:27:56.265828 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/metrics-server-74b988cdf6-hhs6s" Dec 03 12:28:17 crc kubenswrapper[4849]: I1203 12:28:17.333339 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:28:17 crc kubenswrapper[4849]: I1203 12:28:17.357944 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:28:17 crc kubenswrapper[4849]: I1203 12:28:17.380959 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-monitoring/prometheus-k8s-0" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.497236 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5ccf89d679-frx2r"] Dec 03 12:28:50 crc kubenswrapper[4849]: E1203 12:28:50.497719 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c24803cd-8c4d-4ff4-9b85-441f955bd3f3" containerName="registry" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.497731 4849 
state_mem.go:107] "Deleted CPUSet assignment" podUID="c24803cd-8c4d-4ff4-9b85-441f955bd3f3" containerName="registry" Dec 03 12:28:50 crc kubenswrapper[4849]: E1203 12:28:50.497741 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" containerName="console" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.497746 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" containerName="console" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.497863 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a6f5d1b-e29e-4cc2-a3b1-5f8121c4e152" containerName="console" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.497873 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="c24803cd-8c4d-4ff4-9b85-441f955bd3f3" containerName="registry" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.498229 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.508427 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5ccf89d679-frx2r"] Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.614465 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/32181932-4430-45af-9b7f-4f1941d276c3-console-serving-cert\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.614633 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-console-config\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.614779 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-trusted-ca-bundle\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.614857 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/32181932-4430-45af-9b7f-4f1941d276c3-console-oauth-config\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.614945 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8jzv\" (UniqueName: \"kubernetes.io/projected/32181932-4430-45af-9b7f-4f1941d276c3-kube-api-access-m8jzv\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.615018 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-service-ca\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.615085 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-oauth-serving-cert\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.715867 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/32181932-4430-45af-9b7f-4f1941d276c3-console-serving-cert\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.715907 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-console-config\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.715951 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/32181932-4430-45af-9b7f-4f1941d276c3-console-oauth-config\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.715964 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-trusted-ca-bundle\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.715989 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8jzv\" (UniqueName: \"kubernetes.io/projected/32181932-4430-45af-9b7f-4f1941d276c3-kube-api-access-m8jzv\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.716007 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-service-ca\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.716025 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-oauth-serving-cert\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.716774 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" 
(UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-oauth-serving-cert\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.717081 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-service-ca\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.717111 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-trusted-ca-bundle\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.717371 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-console-config\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.720341 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/32181932-4430-45af-9b7f-4f1941d276c3-console-oauth-config\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.721007 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/32181932-4430-45af-9b7f-4f1941d276c3-console-serving-cert\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.729197 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8jzv\" (UniqueName: \"kubernetes.io/projected/32181932-4430-45af-9b7f-4f1941d276c3-kube-api-access-m8jzv\") pod \"console-5ccf89d679-frx2r\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:50 crc kubenswrapper[4849]: I1203 12:28:50.811221 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:28:51 crc kubenswrapper[4849]: I1203 12:28:51.164488 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5ccf89d679-frx2r"] Dec 03 12:28:51 crc kubenswrapper[4849]: I1203 12:28:51.500013 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5ccf89d679-frx2r" event={"ID":"32181932-4430-45af-9b7f-4f1941d276c3","Type":"ContainerStarted","Data":"6eeaee8f58937d53b01d9db8925980bde4a7bb0d03a26f9c2a2ce195536a1633"} Dec 03 12:28:51 crc kubenswrapper[4849]: I1203 12:28:51.500209 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5ccf89d679-frx2r" event={"ID":"32181932-4430-45af-9b7f-4f1941d276c3","Type":"ContainerStarted","Data":"ef6c2dd6124df71ac29f5d34c2c695ef47d1a754e11e6794d7c37ccfce3e12dd"} Dec 03 12:28:51 crc kubenswrapper[4849]: I1203 12:28:51.515305 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5ccf89d679-frx2r" podStartSLOduration=1.515290241 podStartE2EDuration="1.515290241s" podCreationTimestamp="2025-12-03 12:28:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:28:51.512913309 +0000 UTC m=+477.974761092" watchObservedRunningTime="2025-12-03 12:28:51.515290241 +0000 UTC m=+477.977138024" Dec 03 12:29:00 crc kubenswrapper[4849]: I1203 12:29:00.811607 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:29:00 crc kubenswrapper[4849]: I1203 12:29:00.811976 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:29:00 crc kubenswrapper[4849]: I1203 12:29:00.815288 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:29:01 crc kubenswrapper[4849]: I1203 12:29:01.541908 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:29:01 crc kubenswrapper[4849]: I1203 12:29:01.576449 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-7dcb9bcd7d-whztg"] Dec 03 12:29:22 crc kubenswrapper[4849]: I1203 12:29:22.677742 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:29:22 crc kubenswrapper[4849]: I1203 12:29:22.678084 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.602198 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-7dcb9bcd7d-whztg" podUID="9abd75da-eefb-44ec-a9e0-53459780442c" containerName="console" containerID="cri-o://1495809c4bd91446951a34356ecc241fe37a63487db1be32c22d957328501b4b" gracePeriod=15 Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.864153 4849 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-console_console-7dcb9bcd7d-whztg_9abd75da-eefb-44ec-a9e0-53459780442c/console/0.log" Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.864349 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.918097 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9bx5\" (UniqueName: \"kubernetes.io/projected/9abd75da-eefb-44ec-a9e0-53459780442c-kube-api-access-b9bx5\") pod \"9abd75da-eefb-44ec-a9e0-53459780442c\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.918144 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-oauth-serving-cert\") pod \"9abd75da-eefb-44ec-a9e0-53459780442c\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.918209 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-service-ca\") pod \"9abd75da-eefb-44ec-a9e0-53459780442c\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.918236 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9abd75da-eefb-44ec-a9e0-53459780442c-console-oauth-config\") pod \"9abd75da-eefb-44ec-a9e0-53459780442c\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.918290 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-console-config\") pod \"9abd75da-eefb-44ec-a9e0-53459780442c\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.918324 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-trusted-ca-bundle\") pod \"9abd75da-eefb-44ec-a9e0-53459780442c\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.918355 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9abd75da-eefb-44ec-a9e0-53459780442c-console-serving-cert\") pod \"9abd75da-eefb-44ec-a9e0-53459780442c\" (UID: \"9abd75da-eefb-44ec-a9e0-53459780442c\") " Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.918783 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "9abd75da-eefb-44ec-a9e0-53459780442c" (UID: "9abd75da-eefb-44ec-a9e0-53459780442c"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.918871 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-service-ca" (OuterVolumeSpecName: "service-ca") pod "9abd75da-eefb-44ec-a9e0-53459780442c" (UID: "9abd75da-eefb-44ec-a9e0-53459780442c"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.918912 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "9abd75da-eefb-44ec-a9e0-53459780442c" (UID: "9abd75da-eefb-44ec-a9e0-53459780442c"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.918956 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-console-config" (OuterVolumeSpecName: "console-config") pod "9abd75da-eefb-44ec-a9e0-53459780442c" (UID: "9abd75da-eefb-44ec-a9e0-53459780442c"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.921987 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9abd75da-eefb-44ec-a9e0-53459780442c-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "9abd75da-eefb-44ec-a9e0-53459780442c" (UID: "9abd75da-eefb-44ec-a9e0-53459780442c"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.922031 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9abd75da-eefb-44ec-a9e0-53459780442c-kube-api-access-b9bx5" (OuterVolumeSpecName: "kube-api-access-b9bx5") pod "9abd75da-eefb-44ec-a9e0-53459780442c" (UID: "9abd75da-eefb-44ec-a9e0-53459780442c"). InnerVolumeSpecName "kube-api-access-b9bx5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:29:26 crc kubenswrapper[4849]: I1203 12:29:26.922104 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9abd75da-eefb-44ec-a9e0-53459780442c-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "9abd75da-eefb-44ec-a9e0-53459780442c" (UID: "9abd75da-eefb-44ec-a9e0-53459780442c"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.019971 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9bx5\" (UniqueName: \"kubernetes.io/projected/9abd75da-eefb-44ec-a9e0-53459780442c-kube-api-access-b9bx5\") on node \"crc\" DevicePath \"\"" Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.019997 4849 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.020006 4849 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.020016 4849 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9abd75da-eefb-44ec-a9e0-53459780442c-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.020024 4849 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.020032 4849 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9abd75da-eefb-44ec-a9e0-53459780442c-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.020039 4849 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9abd75da-eefb-44ec-a9e0-53459780442c-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.643294 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-7dcb9bcd7d-whztg_9abd75da-eefb-44ec-a9e0-53459780442c/console/0.log" Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.643476 4849 generic.go:334] "Generic (PLEG): container finished" podID="9abd75da-eefb-44ec-a9e0-53459780442c" containerID="1495809c4bd91446951a34356ecc241fe37a63487db1be32c22d957328501b4b" exitCode=2 Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.643501 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7dcb9bcd7d-whztg" event={"ID":"9abd75da-eefb-44ec-a9e0-53459780442c","Type":"ContainerDied","Data":"1495809c4bd91446951a34356ecc241fe37a63487db1be32c22d957328501b4b"} Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.643522 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7dcb9bcd7d-whztg" event={"ID":"9abd75da-eefb-44ec-a9e0-53459780442c","Type":"ContainerDied","Data":"f0b676158296e4f1679d8b87cf74b678e3f96c1099f179792ba9557b044ccac3"} Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.643524 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7dcb9bcd7d-whztg" Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.643535 4849 scope.go:117] "RemoveContainer" containerID="1495809c4bd91446951a34356ecc241fe37a63487db1be32c22d957328501b4b" Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.655433 4849 scope.go:117] "RemoveContainer" containerID="1495809c4bd91446951a34356ecc241fe37a63487db1be32c22d957328501b4b" Dec 03 12:29:27 crc kubenswrapper[4849]: E1203 12:29:27.655768 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1495809c4bd91446951a34356ecc241fe37a63487db1be32c22d957328501b4b\": container with ID starting with 1495809c4bd91446951a34356ecc241fe37a63487db1be32c22d957328501b4b not found: ID does not exist" containerID="1495809c4bd91446951a34356ecc241fe37a63487db1be32c22d957328501b4b" Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.655801 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1495809c4bd91446951a34356ecc241fe37a63487db1be32c22d957328501b4b"} err="failed to get container status \"1495809c4bd91446951a34356ecc241fe37a63487db1be32c22d957328501b4b\": rpc error: code = NotFound desc = could not find container \"1495809c4bd91446951a34356ecc241fe37a63487db1be32c22d957328501b4b\": container with ID starting with 1495809c4bd91446951a34356ecc241fe37a63487db1be32c22d957328501b4b not found: ID does not exist" Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.663798 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-7dcb9bcd7d-whztg"] Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.667051 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-7dcb9bcd7d-whztg"] Dec 03 12:29:27 crc kubenswrapper[4849]: I1203 12:29:27.860726 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9abd75da-eefb-44ec-a9e0-53459780442c" path="/var/lib/kubelet/pods/9abd75da-eefb-44ec-a9e0-53459780442c/volumes" Dec 03 12:29:52 crc kubenswrapper[4849]: I1203 12:29:52.677271 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:29:52 crc kubenswrapper[4849]: I1203 12:29:52.677676 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.145551 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f"] Dec 03 12:30:00 crc kubenswrapper[4849]: E1203 12:30:00.146814 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9abd75da-eefb-44ec-a9e0-53459780442c" containerName="console" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.146891 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="9abd75da-eefb-44ec-a9e0-53459780442c" containerName="console" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.147033 4849 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="9abd75da-eefb-44ec-a9e0-53459780442c" containerName="console" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.147444 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.148546 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.148854 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.151117 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f"] Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.161065 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/04291f3c-da63-4676-94fc-17fedfee7304-secret-volume\") pod \"collect-profiles-29412750-7gf4f\" (UID: \"04291f3c-da63-4676-94fc-17fedfee7304\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.161251 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/04291f3c-da63-4676-94fc-17fedfee7304-config-volume\") pod \"collect-profiles-29412750-7gf4f\" (UID: \"04291f3c-da63-4676-94fc-17fedfee7304\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.161298 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzw8w\" (UniqueName: \"kubernetes.io/projected/04291f3c-da63-4676-94fc-17fedfee7304-kube-api-access-bzw8w\") pod \"collect-profiles-29412750-7gf4f\" (UID: \"04291f3c-da63-4676-94fc-17fedfee7304\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.262558 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/04291f3c-da63-4676-94fc-17fedfee7304-secret-volume\") pod \"collect-profiles-29412750-7gf4f\" (UID: \"04291f3c-da63-4676-94fc-17fedfee7304\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.262778 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/04291f3c-da63-4676-94fc-17fedfee7304-config-volume\") pod \"collect-profiles-29412750-7gf4f\" (UID: \"04291f3c-da63-4676-94fc-17fedfee7304\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.262859 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzw8w\" (UniqueName: \"kubernetes.io/projected/04291f3c-da63-4676-94fc-17fedfee7304-kube-api-access-bzw8w\") pod \"collect-profiles-29412750-7gf4f\" (UID: \"04291f3c-da63-4676-94fc-17fedfee7304\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.263488 4849 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/04291f3c-da63-4676-94fc-17fedfee7304-config-volume\") pod \"collect-profiles-29412750-7gf4f\" (UID: \"04291f3c-da63-4676-94fc-17fedfee7304\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.266708 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/04291f3c-da63-4676-94fc-17fedfee7304-secret-volume\") pod \"collect-profiles-29412750-7gf4f\" (UID: \"04291f3c-da63-4676-94fc-17fedfee7304\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.275598 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzw8w\" (UniqueName: \"kubernetes.io/projected/04291f3c-da63-4676-94fc-17fedfee7304-kube-api-access-bzw8w\") pod \"collect-profiles-29412750-7gf4f\" (UID: \"04291f3c-da63-4676-94fc-17fedfee7304\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.461971 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" Dec 03 12:30:00 crc kubenswrapper[4849]: I1203 12:30:00.790632 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f"] Dec 03 12:30:01 crc kubenswrapper[4849]: I1203 12:30:01.788702 4849 generic.go:334] "Generic (PLEG): container finished" podID="04291f3c-da63-4676-94fc-17fedfee7304" containerID="0c33b6c2420991cfeb58b72d1d33f469b07730324afc47083c87020c7a2e6840" exitCode=0 Dec 03 12:30:01 crc kubenswrapper[4849]: I1203 12:30:01.788799 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" event={"ID":"04291f3c-da63-4676-94fc-17fedfee7304","Type":"ContainerDied","Data":"0c33b6c2420991cfeb58b72d1d33f469b07730324afc47083c87020c7a2e6840"} Dec 03 12:30:01 crc kubenswrapper[4849]: I1203 12:30:01.788897 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" event={"ID":"04291f3c-da63-4676-94fc-17fedfee7304","Type":"ContainerStarted","Data":"e4734bacfdd18c604b27727708d71ab798bbaf53ea2a5096e9ff2cfe173f51b6"} Dec 03 12:30:02 crc kubenswrapper[4849]: I1203 12:30:02.948807 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" Dec 03 12:30:02 crc kubenswrapper[4849]: I1203 12:30:02.994014 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/04291f3c-da63-4676-94fc-17fedfee7304-secret-volume\") pod \"04291f3c-da63-4676-94fc-17fedfee7304\" (UID: \"04291f3c-da63-4676-94fc-17fedfee7304\") " Dec 03 12:30:02 crc kubenswrapper[4849]: I1203 12:30:02.994056 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/04291f3c-da63-4676-94fc-17fedfee7304-config-volume\") pod \"04291f3c-da63-4676-94fc-17fedfee7304\" (UID: \"04291f3c-da63-4676-94fc-17fedfee7304\") " Dec 03 12:30:02 crc kubenswrapper[4849]: I1203 12:30:02.994095 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzw8w\" (UniqueName: \"kubernetes.io/projected/04291f3c-da63-4676-94fc-17fedfee7304-kube-api-access-bzw8w\") pod \"04291f3c-da63-4676-94fc-17fedfee7304\" (UID: \"04291f3c-da63-4676-94fc-17fedfee7304\") " Dec 03 12:30:02 crc kubenswrapper[4849]: I1203 12:30:02.994578 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04291f3c-da63-4676-94fc-17fedfee7304-config-volume" (OuterVolumeSpecName: "config-volume") pod "04291f3c-da63-4676-94fc-17fedfee7304" (UID: "04291f3c-da63-4676-94fc-17fedfee7304"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:30:02 crc kubenswrapper[4849]: I1203 12:30:02.998031 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04291f3c-da63-4676-94fc-17fedfee7304-kube-api-access-bzw8w" (OuterVolumeSpecName: "kube-api-access-bzw8w") pod "04291f3c-da63-4676-94fc-17fedfee7304" (UID: "04291f3c-da63-4676-94fc-17fedfee7304"). InnerVolumeSpecName "kube-api-access-bzw8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:30:02 crc kubenswrapper[4849]: I1203 12:30:02.998140 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04291f3c-da63-4676-94fc-17fedfee7304-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "04291f3c-da63-4676-94fc-17fedfee7304" (UID: "04291f3c-da63-4676-94fc-17fedfee7304"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:30:03 crc kubenswrapper[4849]: I1203 12:30:03.094984 4849 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/04291f3c-da63-4676-94fc-17fedfee7304-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:30:03 crc kubenswrapper[4849]: I1203 12:30:03.095013 4849 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/04291f3c-da63-4676-94fc-17fedfee7304-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:30:03 crc kubenswrapper[4849]: I1203 12:30:03.095023 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzw8w\" (UniqueName: \"kubernetes.io/projected/04291f3c-da63-4676-94fc-17fedfee7304-kube-api-access-bzw8w\") on node \"crc\" DevicePath \"\"" Dec 03 12:30:03 crc kubenswrapper[4849]: I1203 12:30:03.798310 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" event={"ID":"04291f3c-da63-4676-94fc-17fedfee7304","Type":"ContainerDied","Data":"e4734bacfdd18c604b27727708d71ab798bbaf53ea2a5096e9ff2cfe173f51b6"} Dec 03 12:30:03 crc kubenswrapper[4849]: I1203 12:30:03.798335 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412750-7gf4f" Dec 03 12:30:03 crc kubenswrapper[4849]: I1203 12:30:03.798343 4849 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4734bacfdd18c604b27727708d71ab798bbaf53ea2a5096e9ff2cfe173f51b6" Dec 03 12:30:22 crc kubenswrapper[4849]: I1203 12:30:22.677582 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:30:22 crc kubenswrapper[4849]: I1203 12:30:22.678590 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:30:22 crc kubenswrapper[4849]: I1203 12:30:22.678661 4849 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:30:22 crc kubenswrapper[4849]: I1203 12:30:22.679110 4849 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9a18b9deb4424c2bff6208213fb1170494c2f36474a4b5d0eada48c5afabce44"} pod="openshift-machine-config-operator/machine-config-daemon-hszbg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:30:22 crc kubenswrapper[4849]: I1203 12:30:22.679167 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" containerID="cri-o://9a18b9deb4424c2bff6208213fb1170494c2f36474a4b5d0eada48c5afabce44" gracePeriod=600 Dec 03 12:30:22 crc kubenswrapper[4849]: I1203 12:30:22.870579 4849 generic.go:334] "Generic (PLEG): container finished" 
podID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerID="9a18b9deb4424c2bff6208213fb1170494c2f36474a4b5d0eada48c5afabce44" exitCode=0 Dec 03 12:30:22 crc kubenswrapper[4849]: I1203 12:30:22.870608 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerDied","Data":"9a18b9deb4424c2bff6208213fb1170494c2f36474a4b5d0eada48c5afabce44"} Dec 03 12:30:22 crc kubenswrapper[4849]: I1203 12:30:22.870630 4849 scope.go:117] "RemoveContainer" containerID="17ec90961f5f8754905eb5b0226119e23c94a47d88996ec1a2c526a0bafcd1b4" Dec 03 12:30:23 crc kubenswrapper[4849]: I1203 12:30:23.875320 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerStarted","Data":"2ef7a020e9553af217991c2be82bd8c7cbc4859d782a4f813dbd020c01097d67"} Dec 03 12:30:46 crc kubenswrapper[4849]: I1203 12:30:46.831136 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh"] Dec 03 12:30:46 crc kubenswrapper[4849]: E1203 12:30:46.831604 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04291f3c-da63-4676-94fc-17fedfee7304" containerName="collect-profiles" Dec 03 12:30:46 crc kubenswrapper[4849]: I1203 12:30:46.831614 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="04291f3c-da63-4676-94fc-17fedfee7304" containerName="collect-profiles" Dec 03 12:30:46 crc kubenswrapper[4849]: I1203 12:30:46.831739 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="04291f3c-da63-4676-94fc-17fedfee7304" containerName="collect-profiles" Dec 03 12:30:46 crc kubenswrapper[4849]: I1203 12:30:46.832382 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" Dec 03 12:30:46 crc kubenswrapper[4849]: I1203 12:30:46.835134 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 12:30:46 crc kubenswrapper[4849]: I1203 12:30:46.837686 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh"] Dec 03 12:30:46 crc kubenswrapper[4849]: I1203 12:30:46.976948 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh\" (UID: \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" Dec 03 12:30:46 crc kubenswrapper[4849]: I1203 12:30:46.977001 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh\" (UID: \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" Dec 03 12:30:46 crc kubenswrapper[4849]: I1203 12:30:46.977081 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkqnq\" (UniqueName: \"kubernetes.io/projected/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-kube-api-access-tkqnq\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh\" (UID: \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" Dec 03 12:30:47 crc kubenswrapper[4849]: I1203 12:30:47.078683 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh\" (UID: \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" Dec 03 12:30:47 crc kubenswrapper[4849]: I1203 12:30:47.078916 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh\" (UID: \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" Dec 03 12:30:47 crc kubenswrapper[4849]: I1203 12:30:47.078969 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkqnq\" (UniqueName: \"kubernetes.io/projected/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-kube-api-access-tkqnq\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh\" (UID: \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" Dec 03 12:30:47 crc kubenswrapper[4849]: I1203 12:30:47.079709 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh\" (UID: \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" Dec 03 12:30:47 crc kubenswrapper[4849]: I1203 12:30:47.079927 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh\" (UID: \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" Dec 03 12:30:47 crc kubenswrapper[4849]: I1203 12:30:47.099672 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkqnq\" (UniqueName: \"kubernetes.io/projected/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-kube-api-access-tkqnq\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh\" (UID: \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" Dec 03 12:30:47 crc kubenswrapper[4849]: I1203 12:30:47.145525 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" Dec 03 12:30:47 crc kubenswrapper[4849]: I1203 12:30:47.481527 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh"] Dec 03 12:30:47 crc kubenswrapper[4849]: I1203 12:30:47.969124 4849 generic.go:334] "Generic (PLEG): container finished" podID="09bda4af-bca8-4f80-9d40-ab5f3e5459ce" containerID="ebfc411f72b6740d34da8445a227dc38273cf43c1d1f1a9ed516f52309fc0dfe" exitCode=0 Dec 03 12:30:47 crc kubenswrapper[4849]: I1203 12:30:47.969194 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" event={"ID":"09bda4af-bca8-4f80-9d40-ab5f3e5459ce","Type":"ContainerDied","Data":"ebfc411f72b6740d34da8445a227dc38273cf43c1d1f1a9ed516f52309fc0dfe"} Dec 03 12:30:47 crc kubenswrapper[4849]: I1203 12:30:47.969316 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" event={"ID":"09bda4af-bca8-4f80-9d40-ab5f3e5459ce","Type":"ContainerStarted","Data":"c5cc2d63423c85071db21cfdb848472b6d2d4766e3cc99aa2c3f13134c90ce6e"} Dec 03 12:30:47 crc kubenswrapper[4849]: I1203 12:30:47.970516 4849 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 12:30:49 crc kubenswrapper[4849]: I1203 12:30:49.979019 4849 generic.go:334] "Generic (PLEG): container finished" podID="09bda4af-bca8-4f80-9d40-ab5f3e5459ce" containerID="bfff62a37dfe73d4ffe946b82372c6b3deebe4b21ddf613709782cd4b81bee06" exitCode=0 Dec 03 12:30:49 crc kubenswrapper[4849]: I1203 12:30:49.979091 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" event={"ID":"09bda4af-bca8-4f80-9d40-ab5f3e5459ce","Type":"ContainerDied","Data":"bfff62a37dfe73d4ffe946b82372c6b3deebe4b21ddf613709782cd4b81bee06"} Dec 03 12:30:50 crc kubenswrapper[4849]: I1203 12:30:50.985122 4849 generic.go:334] "Generic (PLEG): container finished" 
podID="09bda4af-bca8-4f80-9d40-ab5f3e5459ce" containerID="9cfa6a5383a8441a7815eb1b3990ac0ef35c4a1fe278eb21ba23fc00167f2f71" exitCode=0 Dec 03 12:30:50 crc kubenswrapper[4849]: I1203 12:30:50.985171 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" event={"ID":"09bda4af-bca8-4f80-9d40-ab5f3e5459ce","Type":"ContainerDied","Data":"9cfa6a5383a8441a7815eb1b3990ac0ef35c4a1fe278eb21ba23fc00167f2f71"} Dec 03 12:30:52 crc kubenswrapper[4849]: I1203 12:30:52.147228 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" Dec 03 12:30:52 crc kubenswrapper[4849]: I1203 12:30:52.238170 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkqnq\" (UniqueName: \"kubernetes.io/projected/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-kube-api-access-tkqnq\") pod \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\" (UID: \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\") " Dec 03 12:30:52 crc kubenswrapper[4849]: I1203 12:30:52.238527 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-bundle\") pod \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\" (UID: \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\") " Dec 03 12:30:52 crc kubenswrapper[4849]: I1203 12:30:52.238671 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-util\") pod \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\" (UID: \"09bda4af-bca8-4f80-9d40-ab5f3e5459ce\") " Dec 03 12:30:52 crc kubenswrapper[4849]: I1203 12:30:52.240537 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-bundle" (OuterVolumeSpecName: "bundle") pod "09bda4af-bca8-4f80-9d40-ab5f3e5459ce" (UID: "09bda4af-bca8-4f80-9d40-ab5f3e5459ce"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:30:52 crc kubenswrapper[4849]: I1203 12:30:52.243416 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-kube-api-access-tkqnq" (OuterVolumeSpecName: "kube-api-access-tkqnq") pod "09bda4af-bca8-4f80-9d40-ab5f3e5459ce" (UID: "09bda4af-bca8-4f80-9d40-ab5f3e5459ce"). InnerVolumeSpecName "kube-api-access-tkqnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:30:52 crc kubenswrapper[4849]: I1203 12:30:52.249177 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-util" (OuterVolumeSpecName: "util") pod "09bda4af-bca8-4f80-9d40-ab5f3e5459ce" (UID: "09bda4af-bca8-4f80-9d40-ab5f3e5459ce"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:30:52 crc kubenswrapper[4849]: I1203 12:30:52.341793 4849 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:30:52 crc kubenswrapper[4849]: I1203 12:30:52.341989 4849 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-util\") on node \"crc\" DevicePath \"\"" Dec 03 12:30:52 crc kubenswrapper[4849]: I1203 12:30:52.342000 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkqnq\" (UniqueName: \"kubernetes.io/projected/09bda4af-bca8-4f80-9d40-ab5f3e5459ce-kube-api-access-tkqnq\") on node \"crc\" DevicePath \"\"" Dec 03 12:30:52 crc kubenswrapper[4849]: I1203 12:30:52.997040 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" event={"ID":"09bda4af-bca8-4f80-9d40-ab5f3e5459ce","Type":"ContainerDied","Data":"c5cc2d63423c85071db21cfdb848472b6d2d4766e3cc99aa2c3f13134c90ce6e"} Dec 03 12:30:52 crc kubenswrapper[4849]: I1203 12:30:52.997073 4849 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5cc2d63423c85071db21cfdb848472b6d2d4766e3cc99aa2c3f13134c90ce6e" Dec 03 12:30:52 crc kubenswrapper[4849]: I1203 12:30:52.997339 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh" Dec 03 12:30:58 crc kubenswrapper[4849]: I1203 12:30:58.934157 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tkrt4"] Dec 03 12:30:58 crc kubenswrapper[4849]: I1203 12:30:58.935671 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovn-controller" containerID="cri-o://59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb" gracePeriod=30 Dec 03 12:30:58 crc kubenswrapper[4849]: I1203 12:30:58.935711 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="nbdb" containerID="cri-o://fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9" gracePeriod=30 Dec 03 12:30:58 crc kubenswrapper[4849]: I1203 12:30:58.935754 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="kube-rbac-proxy-node" containerID="cri-o://1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974" gracePeriod=30 Dec 03 12:30:58 crc kubenswrapper[4849]: I1203 12:30:58.935821 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovn-acl-logging" containerID="cri-o://274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c" gracePeriod=30 Dec 03 12:30:58 crc kubenswrapper[4849]: I1203 12:30:58.935850 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="sbdb" 
containerID="cri-o://1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86" gracePeriod=30 Dec 03 12:30:58 crc kubenswrapper[4849]: I1203 12:30:58.935901 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="northd" containerID="cri-o://5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf" gracePeriod=30 Dec 03 12:30:58 crc kubenswrapper[4849]: I1203 12:30:58.935998 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06" gracePeriod=30 Dec 03 12:30:58 crc kubenswrapper[4849]: I1203 12:30:58.957067 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" containerID="cri-o://07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0" gracePeriod=30 Dec 03 12:30:59 crc kubenswrapper[4849]: I1203 12:30:59.022354 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pjsx_1b60c35d-f388-49eb-a5d8-09a6cc752575/kube-multus/2.log" Dec 03 12:30:59 crc kubenswrapper[4849]: I1203 12:30:59.022749 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pjsx_1b60c35d-f388-49eb-a5d8-09a6cc752575/kube-multus/1.log" Dec 03 12:30:59 crc kubenswrapper[4849]: I1203 12:30:59.022791 4849 generic.go:334] "Generic (PLEG): container finished" podID="1b60c35d-f388-49eb-a5d8-09a6cc752575" containerID="ad36464d1d23439200024871f7f6e4beba4ab7b967317c2f0c2373d670d27098" exitCode=2 Dec 03 12:30:59 crc kubenswrapper[4849]: I1203 12:30:59.022817 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pjsx" event={"ID":"1b60c35d-f388-49eb-a5d8-09a6cc752575","Type":"ContainerDied","Data":"ad36464d1d23439200024871f7f6e4beba4ab7b967317c2f0c2373d670d27098"} Dec 03 12:30:59 crc kubenswrapper[4849]: I1203 12:30:59.022855 4849 scope.go:117] "RemoveContainer" containerID="c366bcc45a52929441f0992463074c30100244da02eeb2b1f5e22150b91be24d" Dec 03 12:30:59 crc kubenswrapper[4849]: I1203 12:30:59.023344 4849 scope.go:117] "RemoveContainer" containerID="ad36464d1d23439200024871f7f6e4beba4ab7b967317c2f0c2373d670d27098" Dec 03 12:30:59 crc kubenswrapper[4849]: E1203 12:30:59.023610 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-2pjsx_openshift-multus(1b60c35d-f388-49eb-a5d8-09a6cc752575)\"" pod="openshift-multus/multus-2pjsx" podUID="1b60c35d-f388-49eb-a5d8-09a6cc752575" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.029432 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovnkube-controller/3.log" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.031667 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovn-acl-logging/0.log" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032205 4849 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovn-controller/0.log" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032549 4849 generic.go:334] "Generic (PLEG): container finished" podID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerID="07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0" exitCode=0 Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032570 4849 generic.go:334] "Generic (PLEG): container finished" podID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerID="1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86" exitCode=0 Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032578 4849 generic.go:334] "Generic (PLEG): container finished" podID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerID="fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9" exitCode=0 Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032586 4849 generic.go:334] "Generic (PLEG): container finished" podID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerID="5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf" exitCode=0 Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032592 4849 generic.go:334] "Generic (PLEG): container finished" podID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerID="274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c" exitCode=143 Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032598 4849 generic.go:334] "Generic (PLEG): container finished" podID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerID="59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb" exitCode=143 Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032616 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0"} Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032659 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86"} Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032672 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9"} Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032681 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf"} Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032688 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c"} Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032696 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" 
event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb"} Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.032718 4849 scope.go:117] "RemoveContainer" containerID="bbb64c2ac73b6ae27782d296de4e2c7223e5cc219fc8c99740e7a5a74c47ebc3" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.034093 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pjsx_1b60c35d-f388-49eb-a5d8-09a6cc752575/kube-multus/2.log" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.161631 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovn-acl-logging/0.log" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.162117 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovn-controller/0.log" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.162461 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198563 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-w447b"] Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198798 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="nbdb" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198815 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="nbdb" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198826 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198832 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198838 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198843 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198851 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovn-acl-logging" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198857 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovn-acl-logging" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198866 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="kube-rbac-proxy-node" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198871 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="kube-rbac-proxy-node" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198882 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" 
containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198887 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198892 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198898 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198905 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="northd" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198911 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="northd" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198920 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovn-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198926 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovn-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198933 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09bda4af-bca8-4f80-9d40-ab5f3e5459ce" containerName="pull" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198939 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="09bda4af-bca8-4f80-9d40-ab5f3e5459ce" containerName="pull" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198947 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="sbdb" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198951 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="sbdb" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198959 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09bda4af-bca8-4f80-9d40-ab5f3e5459ce" containerName="extract" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198964 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="09bda4af-bca8-4f80-9d40-ab5f3e5459ce" containerName="extract" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198971 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198976 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198983 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="kubecfg-setup" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.198988 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="kubecfg-setup" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.198998 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09bda4af-bca8-4f80-9d40-ab5f3e5459ce" containerName="util" Dec 03 
12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199003 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="09bda4af-bca8-4f80-9d40-ab5f3e5459ce" containerName="util" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199092 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199102 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199111 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="nbdb" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199116 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199124 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="sbdb" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199131 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199138 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovn-acl-logging" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199144 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199161 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovn-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199169 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="09bda4af-bca8-4f80-9d40-ab5f3e5459ce" containerName="extract" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199175 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="kube-rbac-proxy-node" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199180 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="northd" Dec 03 12:31:00 crc kubenswrapper[4849]: E1203 12:31:00.199272 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199278 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.199357 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerName="ovnkube-controller" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.200812 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350115 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-openvswitch\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350187 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovnkube-config\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350206 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-kubelet\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350232 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-run-netns\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350247 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-cni-netd\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350249 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350283 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-var-lib-cni-networks-ovn-kubernetes\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350301 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350309 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovn-node-metrics-cert\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350305 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350327 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-systemd\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350353 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-env-overrides\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350315 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350347 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350371 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-ovn\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350405 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350459 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6m9x\" (UniqueName: \"kubernetes.io/projected/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-kube-api-access-x6m9x\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350479 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-etc-openvswitch\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350499 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-var-lib-openvswitch\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350519 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-node-log\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350538 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovnkube-script-lib\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350541 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350550 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-run-ovn-kubernetes\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350559 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350588 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350592 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350573 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-cni-bin\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350567 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-node-log" (OuterVolumeSpecName: "node-log") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350671 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-log-socket" (OuterVolumeSpecName: "log-socket") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350628 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350653 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-log-socket\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350686 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350736 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-slash\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350761 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-systemd-units\") pod \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\" (UID: \"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677\") " Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350860 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-slash" (OuterVolumeSpecName: "host-slash") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350923 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350936 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350947 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gs4t\" (UniqueName: \"kubernetes.io/projected/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-kube-api-access-9gs4t\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.350953 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351021 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-kubelet\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351044 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-run-openvswitch\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351093 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-ovnkube-config\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351111 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-cni-bin\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351147 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-env-overrides\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351164 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-run-systemd\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351194 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-log-socket\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351222 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-slash\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351237 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-ovn-node-metrics-cert\") pod 
\"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351253 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-cni-netd\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351303 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-run-ovn-kubernetes\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351354 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-var-lib-openvswitch\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351413 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-run-ovn\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351445 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-systemd-units\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351458 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-run-netns\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351538 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-node-log\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351619 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-ovnkube-script-lib\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351691 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-etc-openvswitch\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351782 4849 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351794 4849 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351804 4849 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351814 4849 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351822 4849 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351831 4849 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351839 4849 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351847 4849 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-node-log\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351854 4849 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351862 4849 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351869 4849 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351877 4849 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-log-socket\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351885 4849 
reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-slash\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351893 4849 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351901 4849 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351909 4849 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.351916 4849 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.355131 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.355799 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-kube-api-access-x6m9x" (OuterVolumeSpecName: "kube-api-access-x6m9x") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "kube-api-access-x6m9x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.361727 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" (UID: "3d8dd3fd-f66b-4e40-a41b-e444e5e8b677"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.452837 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-run-ovn\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.452891 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-run-netns\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.452910 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-systemd-units\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.452960 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-node-log\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.452956 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-run-ovn\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.452976 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-run-netns\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453011 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-node-log\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453038 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-systemd-units\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.452979 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-ovnkube-script-lib\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453681 4849 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-ovnkube-script-lib\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453688 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-etc-openvswitch\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453739 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-etc-openvswitch\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453759 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453782 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gs4t\" (UniqueName: \"kubernetes.io/projected/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-kube-api-access-9gs4t\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453786 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453817 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-kubelet\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453832 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-run-openvswitch\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453849 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-ovnkube-config\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453869 4849 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-cni-bin\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453897 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-env-overrides\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453914 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-run-systemd\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453923 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-run-openvswitch\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453937 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-log-socket\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453962 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-log-socket\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453849 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-kubelet\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.453991 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-slash\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454002 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-run-systemd\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454011 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-cni-netd\") pod 
\"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454016 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-cni-bin\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454025 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-ovn-node-metrics-cert\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454082 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-run-ovn-kubernetes\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454113 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-var-lib-openvswitch\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454205 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-var-lib-openvswitch\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454215 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-slash\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454221 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6m9x\" (UniqueName: \"kubernetes.io/projected/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-kube-api-access-x6m9x\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454253 4849 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454264 4849 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454253 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-run-ovn-kubernetes\") pod 
\"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454233 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-host-cni-netd\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454363 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-ovnkube-config\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.454365 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-env-overrides\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.457396 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-ovn-node-metrics-cert\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.466278 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gs4t\" (UniqueName: \"kubernetes.io/projected/78fd1215-ac5b-477a-a7f7-e85b725a2d9a-kube-api-access-9gs4t\") pod \"ovnkube-node-w447b\" (UID: \"78fd1215-ac5b-477a-a7f7-e85b725a2d9a\") " pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: I1203 12:31:00.512014 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:00 crc kubenswrapper[4849]: W1203 12:31:00.526920 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78fd1215_ac5b_477a_a7f7_e85b725a2d9a.slice/crio-93d48f04a0e0033ffff5f582a2565d76d26f3f838294c0f8eb73ef1970cbe3ba WatchSource:0}: Error finding container 93d48f04a0e0033ffff5f582a2565d76d26f3f838294c0f8eb73ef1970cbe3ba: Status 404 returned error can't find the container with id 93d48f04a0e0033ffff5f582a2565d76d26f3f838294c0f8eb73ef1970cbe3ba Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.029541 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7"] Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.030408 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.031750 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.032924 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-4p5tm" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.032826 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.043042 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovn-acl-logging/0.log" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.043771 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tkrt4_3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/ovn-controller/0.log" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.044072 4849 generic.go:334] "Generic (PLEG): container finished" podID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerID="ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06" exitCode=0 Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.044095 4849 generic.go:334] "Generic (PLEG): container finished" podID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" containerID="1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974" exitCode=0 Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.044143 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06"} Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.044165 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.044176 4849 scope.go:117] "RemoveContainer" containerID="07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.044165 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974"} Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.044266 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tkrt4" event={"ID":"3d8dd3fd-f66b-4e40-a41b-e444e5e8b677","Type":"ContainerDied","Data":"1c5165cb511d822539f770794f2151e79eb07aa6dcd53ef91316963465ab80ae"} Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.045734 4849 generic.go:334] "Generic (PLEG): container finished" podID="78fd1215-ac5b-477a-a7f7-e85b725a2d9a" containerID="e4adfe3c8696b972eceb38c8b8559f56faf9d631fc9f8ccb98fde91a358734cf" exitCode=0 Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.045755 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" event={"ID":"78fd1215-ac5b-477a-a7f7-e85b725a2d9a","Type":"ContainerDied","Data":"e4adfe3c8696b972eceb38c8b8559f56faf9d631fc9f8ccb98fde91a358734cf"} Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.045768 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" event={"ID":"78fd1215-ac5b-477a-a7f7-e85b725a2d9a","Type":"ContainerStarted","Data":"93d48f04a0e0033ffff5f582a2565d76d26f3f838294c0f8eb73ef1970cbe3ba"} Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.063027 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5n6fq\" (UniqueName: \"kubernetes.io/projected/4289cd35-0b12-4095-9c25-b071d4351d32-kube-api-access-5n6fq\") pod \"obo-prometheus-operator-668cf9dfbb-7zxv7\" (UID: \"4289cd35-0b12-4095-9c25-b071d4351d32\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.063939 4849 scope.go:117] "RemoveContainer" containerID="1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.084375 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tkrt4"] Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.089682 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tkrt4"] Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.092606 4849 scope.go:117] "RemoveContainer" containerID="fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.106412 4849 scope.go:117] "RemoveContainer" containerID="5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.123131 4849 scope.go:117] "RemoveContainer" containerID="ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.135638 4849 scope.go:117] "RemoveContainer" containerID="1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.141894 4849 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p"] Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.142523 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.143880 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.144127 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-s8qgc" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.154716 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz"] Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.155293 4849 scope.go:117] "RemoveContainer" containerID="274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.157856 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.166880 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0af871d7-9f6d-49ba-97ad-d0d36cceed52-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-84db7f5694-67hvz\" (UID: \"0af871d7-9f6d-49ba-97ad-d0d36cceed52\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.166930 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0af871d7-9f6d-49ba-97ad-d0d36cceed52-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-84db7f5694-67hvz\" (UID: \"0af871d7-9f6d-49ba-97ad-d0d36cceed52\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.166962 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ffaeeb44-3be6-4407-9095-339c36ae6c58-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p\" (UID: \"ffaeeb44-3be6-4407-9095-339c36ae6c58\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.167019 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5n6fq\" (UniqueName: \"kubernetes.io/projected/4289cd35-0b12-4095-9c25-b071d4351d32-kube-api-access-5n6fq\") pod \"obo-prometheus-operator-668cf9dfbb-7zxv7\" (UID: \"4289cd35-0b12-4095-9c25-b071d4351d32\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.167035 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ffaeeb44-3be6-4407-9095-339c36ae6c58-webhook-cert\") pod 
\"obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p\" (UID: \"ffaeeb44-3be6-4407-9095-339c36ae6c58\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.188817 4849 scope.go:117] "RemoveContainer" containerID="59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.189618 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5n6fq\" (UniqueName: \"kubernetes.io/projected/4289cd35-0b12-4095-9c25-b071d4351d32-kube-api-access-5n6fq\") pod \"obo-prometheus-operator-668cf9dfbb-7zxv7\" (UID: \"4289cd35-0b12-4095-9c25-b071d4351d32\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.216966 4849 scope.go:117] "RemoveContainer" containerID="1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.241533 4849 scope.go:117] "RemoveContainer" containerID="07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.241922 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0\": container with ID starting with 07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0 not found: ID does not exist" containerID="07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.241953 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0"} err="failed to get container status \"07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0\": rpc error: code = NotFound desc = could not find container \"07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0\": container with ID starting with 07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0 not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.241971 4849 scope.go:117] "RemoveContainer" containerID="1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.242187 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\": container with ID starting with 1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86 not found: ID does not exist" containerID="1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.242219 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86"} err="failed to get container status \"1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\": rpc error: code = NotFound desc = could not find container \"1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\": container with ID starting with 1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86 not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.242234 4849 scope.go:117] 
"RemoveContainer" containerID="fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.242491 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\": container with ID starting with fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9 not found: ID does not exist" containerID="fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.242560 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9"} err="failed to get container status \"fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\": rpc error: code = NotFound desc = could not find container \"fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\": container with ID starting with fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9 not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.242622 4849 scope.go:117] "RemoveContainer" containerID="5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.242886 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\": container with ID starting with 5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf not found: ID does not exist" containerID="5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.242906 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf"} err="failed to get container status \"5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\": rpc error: code = NotFound desc = could not find container \"5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\": container with ID starting with 5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.242918 4849 scope.go:117] "RemoveContainer" containerID="ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.243104 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\": container with ID starting with ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06 not found: ID does not exist" containerID="ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.243122 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06"} err="failed to get container status \"ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\": rpc error: code = NotFound desc = could not find container \"ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\": container with ID starting with 
ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06 not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.243134 4849 scope.go:117] "RemoveContainer" containerID="1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.243291 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\": container with ID starting with 1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974 not found: ID does not exist" containerID="1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.243322 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974"} err="failed to get container status \"1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\": rpc error: code = NotFound desc = could not find container \"1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\": container with ID starting with 1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974 not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.243334 4849 scope.go:117] "RemoveContainer" containerID="274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.243601 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\": container with ID starting with 274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c not found: ID does not exist" containerID="274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.243689 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c"} err="failed to get container status \"274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\": rpc error: code = NotFound desc = could not find container \"274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\": container with ID starting with 274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.243800 4849 scope.go:117] "RemoveContainer" containerID="59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.244023 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\": container with ID starting with 59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb not found: ID does not exist" containerID="59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.244092 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb"} err="failed to get container status \"59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\": rpc 
error: code = NotFound desc = could not find container \"59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\": container with ID starting with 59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.244150 4849 scope.go:117] "RemoveContainer" containerID="1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.244374 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\": container with ID starting with 1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027 not found: ID does not exist" containerID="1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.244393 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027"} err="failed to get container status \"1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\": rpc error: code = NotFound desc = could not find container \"1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\": container with ID starting with 1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027 not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.244406 4849 scope.go:117] "RemoveContainer" containerID="07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.244560 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0"} err="failed to get container status \"07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0\": rpc error: code = NotFound desc = could not find container \"07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0\": container with ID starting with 07f573d0a7f15ae29fcde53335f93b72bb9e9d1ac61eebfbef76b084d6f300e0 not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.244580 4849 scope.go:117] "RemoveContainer" containerID="1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.244920 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86"} err="failed to get container status \"1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\": rpc error: code = NotFound desc = could not find container \"1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86\": container with ID starting with 1746eeb0c6f959c98776c5cb3f52a3e168a04b904a1f22cfb973055763955c86 not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.244985 4849 scope.go:117] "RemoveContainer" containerID="fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.245224 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9"} err="failed to get container status \"fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\": rpc 
error: code = NotFound desc = could not find container \"fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9\": container with ID starting with fb0ecadf341a169f072ae17fffb2b96676ec7a0354a518548e8c538ba58618f9 not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.245241 4849 scope.go:117] "RemoveContainer" containerID="5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.245470 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf"} err="failed to get container status \"5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\": rpc error: code = NotFound desc = could not find container \"5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf\": container with ID starting with 5058f82c65e336f485789f1a4741890943aa8cab3b7e85e8e9a844b45540a0cf not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.245564 4849 scope.go:117] "RemoveContainer" containerID="ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.245859 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06"} err="failed to get container status \"ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\": rpc error: code = NotFound desc = could not find container \"ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06\": container with ID starting with ac0b26041767bdce7716036e33295c54c0de5936677e4e561df5b1efb2ac5b06 not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.245876 4849 scope.go:117] "RemoveContainer" containerID="1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.246140 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974"} err="failed to get container status \"1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\": rpc error: code = NotFound desc = could not find container \"1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974\": container with ID starting with 1e94622d49c7c6bd702bb575f9d3ec30aaccfef96f02db548065590f6f687974 not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.246178 4849 scope.go:117] "RemoveContainer" containerID="274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.246411 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c"} err="failed to get container status \"274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\": rpc error: code = NotFound desc = could not find container \"274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c\": container with ID starting with 274f4088c4d8992f014c11613656642f9d1b365bb5e1626691f6a643d32b7d0c not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.246429 4849 scope.go:117] "RemoveContainer" containerID="59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb" Dec 03 12:31:01 crc 
kubenswrapper[4849]: I1203 12:31:01.246588 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb"} err="failed to get container status \"59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\": rpc error: code = NotFound desc = could not find container \"59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb\": container with ID starting with 59bcd9cf68cb01b85f0b4419c23076ab8eafb363f6562cecc0d6e7a96ae829fb not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.246756 4849 scope.go:117] "RemoveContainer" containerID="1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.247051 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027"} err="failed to get container status \"1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\": rpc error: code = NotFound desc = could not find container \"1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027\": container with ID starting with 1bbcc15cf7cd576b749f1b7d5e329316cf855b5121a2eab6527cf642d8b73027 not found: ID does not exist" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.268360 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0af871d7-9f6d-49ba-97ad-d0d36cceed52-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-84db7f5694-67hvz\" (UID: \"0af871d7-9f6d-49ba-97ad-d0d36cceed52\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.268416 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0af871d7-9f6d-49ba-97ad-d0d36cceed52-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-84db7f5694-67hvz\" (UID: \"0af871d7-9f6d-49ba-97ad-d0d36cceed52\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.268450 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ffaeeb44-3be6-4407-9095-339c36ae6c58-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p\" (UID: \"ffaeeb44-3be6-4407-9095-339c36ae6c58\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.268518 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ffaeeb44-3be6-4407-9095-339c36ae6c58-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p\" (UID: \"ffaeeb44-3be6-4407-9095-339c36ae6c58\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.272535 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ffaeeb44-3be6-4407-9095-339c36ae6c58-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p\" (UID: \"ffaeeb44-3be6-4407-9095-339c36ae6c58\") " 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.272942 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0af871d7-9f6d-49ba-97ad-d0d36cceed52-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-84db7f5694-67hvz\" (UID: \"0af871d7-9f6d-49ba-97ad-d0d36cceed52\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.274215 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0af871d7-9f6d-49ba-97ad-d0d36cceed52-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-84db7f5694-67hvz\" (UID: \"0af871d7-9f6d-49ba-97ad-d0d36cceed52\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.275625 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ffaeeb44-3be6-4407-9095-339c36ae6c58-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p\" (UID: \"ffaeeb44-3be6-4407-9095-339c36ae6c58\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.332277 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-4p9g9"] Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.333180 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.335241 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-tfl6m" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.335296 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.358544 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.369314 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr2gx\" (UniqueName: \"kubernetes.io/projected/64bb0224-aa3f-4b8a-854e-251422819440-kube-api-access-tr2gx\") pod \"observability-operator-d8bb48f5d-4p9g9\" (UID: \"64bb0224-aa3f-4b8a-854e-251422819440\") " pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.369362 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/64bb0224-aa3f-4b8a-854e-251422819440-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-4p9g9\" (UID: \"64bb0224-aa3f-4b8a-854e-251422819440\") " pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.376735 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators_4289cd35-0b12-4095-9c25-b071d4351d32_0(7ce4cffcf477c096933be4fc022abdcc2679f2a30fb3340262358a7249424bf9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.376796 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators_4289cd35-0b12-4095-9c25-b071d4351d32_0(7ce4cffcf477c096933be4fc022abdcc2679f2a30fb3340262358a7249424bf9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.376818 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators_4289cd35-0b12-4095-9c25-b071d4351d32_0(7ce4cffcf477c096933be4fc022abdcc2679f2a30fb3340262358a7249424bf9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.376862 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators(4289cd35-0b12-4095-9c25-b071d4351d32)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators(4289cd35-0b12-4095-9c25-b071d4351d32)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators_4289cd35-0b12-4095-9c25-b071d4351d32_0(7ce4cffcf477c096933be4fc022abdcc2679f2a30fb3340262358a7249424bf9): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" podUID="4289cd35-0b12-4095-9c25-b071d4351d32" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.446365 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-xtwpc"] Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.447046 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.448900 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-sbv5z" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.464851 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.470252 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c7f06b-878f-4814-8196-b4a91e2fbca7-openshift-service-ca\") pod \"perses-operator-5446b9c989-xtwpc\" (UID: \"49c7f06b-878f-4814-8196-b4a91e2fbca7\") " pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.470345 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr2gx\" (UniqueName: \"kubernetes.io/projected/64bb0224-aa3f-4b8a-854e-251422819440-kube-api-access-tr2gx\") pod \"observability-operator-d8bb48f5d-4p9g9\" (UID: \"64bb0224-aa3f-4b8a-854e-251422819440\") " pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.470389 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/64bb0224-aa3f-4b8a-854e-251422819440-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-4p9g9\" (UID: \"64bb0224-aa3f-4b8a-854e-251422819440\") " pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.470478 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjkjn\" (UniqueName: \"kubernetes.io/projected/49c7f06b-878f-4814-8196-b4a91e2fbca7-kube-api-access-zjkjn\") pod \"perses-operator-5446b9c989-xtwpc\" (UID: \"49c7f06b-878f-4814-8196-b4a91e2fbca7\") " pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.474169 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/64bb0224-aa3f-4b8a-854e-251422819440-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-4p9g9\" (UID: \"64bb0224-aa3f-4b8a-854e-251422819440\") " pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.475921 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.491209 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr2gx\" (UniqueName: \"kubernetes.io/projected/64bb0224-aa3f-4b8a-854e-251422819440-kube-api-access-tr2gx\") pod \"observability-operator-d8bb48f5d-4p9g9\" (UID: \"64bb0224-aa3f-4b8a-854e-251422819440\") " pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.505860 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators_ffaeeb44-3be6-4407-9095-339c36ae6c58_0(7fb5a308661bfaef681acbbe827c7b0dd93a87b9e7c8e2827bc756b3b7107c13): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.505921 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators_ffaeeb44-3be6-4407-9095-339c36ae6c58_0(7fb5a308661bfaef681acbbe827c7b0dd93a87b9e7c8e2827bc756b3b7107c13): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.505943 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators_ffaeeb44-3be6-4407-9095-339c36ae6c58_0(7fb5a308661bfaef681acbbe827c7b0dd93a87b9e7c8e2827bc756b3b7107c13): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.505982 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators(ffaeeb44-3be6-4407-9095-339c36ae6c58)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators(ffaeeb44-3be6-4407-9095-339c36ae6c58)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators_ffaeeb44-3be6-4407-9095-339c36ae6c58_0(7fb5a308661bfaef681acbbe827c7b0dd93a87b9e7c8e2827bc756b3b7107c13): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" podUID="ffaeeb44-3be6-4407-9095-339c36ae6c58" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.512596 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators_0af871d7-9f6d-49ba-97ad-d0d36cceed52_0(e05dfe948681b39dd1b9926d331781779bd89cea19c9ac72f54a3ccb705a8a0d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.512638 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators_0af871d7-9f6d-49ba-97ad-d0d36cceed52_0(e05dfe948681b39dd1b9926d331781779bd89cea19c9ac72f54a3ccb705a8a0d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.512674 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators_0af871d7-9f6d-49ba-97ad-d0d36cceed52_0(e05dfe948681b39dd1b9926d331781779bd89cea19c9ac72f54a3ccb705a8a0d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.512730 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators(0af871d7-9f6d-49ba-97ad-d0d36cceed52)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators(0af871d7-9f6d-49ba-97ad-d0d36cceed52)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators_0af871d7-9f6d-49ba-97ad-d0d36cceed52_0(e05dfe948681b39dd1b9926d331781779bd89cea19c9ac72f54a3ccb705a8a0d): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" podUID="0af871d7-9f6d-49ba-97ad-d0d36cceed52" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.571653 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjkjn\" (UniqueName: \"kubernetes.io/projected/49c7f06b-878f-4814-8196-b4a91e2fbca7-kube-api-access-zjkjn\") pod \"perses-operator-5446b9c989-xtwpc\" (UID: \"49c7f06b-878f-4814-8196-b4a91e2fbca7\") " pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.571858 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c7f06b-878f-4814-8196-b4a91e2fbca7-openshift-service-ca\") pod \"perses-operator-5446b9c989-xtwpc\" (UID: \"49c7f06b-878f-4814-8196-b4a91e2fbca7\") " pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.572616 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c7f06b-878f-4814-8196-b4a91e2fbca7-openshift-service-ca\") pod \"perses-operator-5446b9c989-xtwpc\" (UID: \"49c7f06b-878f-4814-8196-b4a91e2fbca7\") " pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.592314 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjkjn\" (UniqueName: \"kubernetes.io/projected/49c7f06b-878f-4814-8196-b4a91e2fbca7-kube-api-access-zjkjn\") pod \"perses-operator-5446b9c989-xtwpc\" (UID: \"49c7f06b-878f-4814-8196-b4a91e2fbca7\") " pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.663917 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.685217 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-4p9g9_openshift-operators_64bb0224-aa3f-4b8a-854e-251422819440_0(54891ae60b603fd174b30e7f39f9fd8ea87a713ed3ffe81c28abc51509f95de5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.685274 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-4p9g9_openshift-operators_64bb0224-aa3f-4b8a-854e-251422819440_0(54891ae60b603fd174b30e7f39f9fd8ea87a713ed3ffe81c28abc51509f95de5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.685299 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-4p9g9_openshift-operators_64bb0224-aa3f-4b8a-854e-251422819440_0(54891ae60b603fd174b30e7f39f9fd8ea87a713ed3ffe81c28abc51509f95de5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.685343 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-4p9g9_openshift-operators(64bb0224-aa3f-4b8a-854e-251422819440)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-4p9g9_openshift-operators(64bb0224-aa3f-4b8a-854e-251422819440)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-4p9g9_openshift-operators_64bb0224-aa3f-4b8a-854e-251422819440_0(54891ae60b603fd174b30e7f39f9fd8ea87a713ed3ffe81c28abc51509f95de5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" podUID="64bb0224-aa3f-4b8a-854e-251422819440" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.765977 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.784853 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-xtwpc_openshift-operators_49c7f06b-878f-4814-8196-b4a91e2fbca7_0(7a6da9cc25d89828f12be32733038539eeca1868cde1893d8ac5231ac73dca31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.784912 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-xtwpc_openshift-operators_49c7f06b-878f-4814-8196-b4a91e2fbca7_0(7a6da9cc25d89828f12be32733038539eeca1868cde1893d8ac5231ac73dca31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.784935 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-xtwpc_openshift-operators_49c7f06b-878f-4814-8196-b4a91e2fbca7_0(7a6da9cc25d89828f12be32733038539eeca1868cde1893d8ac5231ac73dca31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:01 crc kubenswrapper[4849]: E1203 12:31:01.784984 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-xtwpc_openshift-operators(49c7f06b-878f-4814-8196-b4a91e2fbca7)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-xtwpc_openshift-operators(49c7f06b-878f-4814-8196-b4a91e2fbca7)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-xtwpc_openshift-operators_49c7f06b-878f-4814-8196-b4a91e2fbca7_0(7a6da9cc25d89828f12be32733038539eeca1868cde1893d8ac5231ac73dca31): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" podUID="49c7f06b-878f-4814-8196-b4a91e2fbca7" Dec 03 12:31:01 crc kubenswrapper[4849]: I1203 12:31:01.862349 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d8dd3fd-f66b-4e40-a41b-e444e5e8b677" path="/var/lib/kubelet/pods/3d8dd3fd-f66b-4e40-a41b-e444e5e8b677/volumes" Dec 03 12:31:02 crc kubenswrapper[4849]: I1203 12:31:02.053630 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" event={"ID":"78fd1215-ac5b-477a-a7f7-e85b725a2d9a","Type":"ContainerStarted","Data":"3e27f39759baf9e0ad7390d1531f7bbcb41b3ab2052e07bb3a4702089c9eddd0"} Dec 03 12:31:02 crc kubenswrapper[4849]: I1203 12:31:02.053862 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" event={"ID":"78fd1215-ac5b-477a-a7f7-e85b725a2d9a","Type":"ContainerStarted","Data":"79876773052beaa631624205bcba840228fb625fe7b47e55ac1e777350c9cf34"} Dec 03 12:31:02 crc kubenswrapper[4849]: I1203 12:31:02.053874 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" event={"ID":"78fd1215-ac5b-477a-a7f7-e85b725a2d9a","Type":"ContainerStarted","Data":"4d44c4d7ea691e2553ca7c6952f65510bfadff2d5a0ae2b311167ea1fe59e641"} Dec 03 12:31:02 crc kubenswrapper[4849]: I1203 12:31:02.053882 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" event={"ID":"78fd1215-ac5b-477a-a7f7-e85b725a2d9a","Type":"ContainerStarted","Data":"4d28894380af17532bf9c3eaf32586ef609840ab1010475436a86cf4b4cbbb29"} Dec 03 12:31:02 crc kubenswrapper[4849]: I1203 12:31:02.053890 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" event={"ID":"78fd1215-ac5b-477a-a7f7-e85b725a2d9a","Type":"ContainerStarted","Data":"b0ef9da2e9374f0307fd16d48cd91fd3ecbd6264b903bee82396baed845a4f91"} Dec 03 12:31:02 crc kubenswrapper[4849]: I1203 12:31:02.053897 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" event={"ID":"78fd1215-ac5b-477a-a7f7-e85b725a2d9a","Type":"ContainerStarted","Data":"e79c1ec10154c4aeb0760b7d9c2ef9598aaa6a9d630ca9d62c546d97692e2579"} Dec 03 12:31:04 crc kubenswrapper[4849]: I1203 12:31:04.064284 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" event={"ID":"78fd1215-ac5b-477a-a7f7-e85b725a2d9a","Type":"ContainerStarted","Data":"00bb1e3af85f7e4f401d5b563a4c3e07cba678e6110525d0f4fa6f74f8f117a1"} Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.076379 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" event={"ID":"78fd1215-ac5b-477a-a7f7-e85b725a2d9a","Type":"ContainerStarted","Data":"9b366259bfa645328c06b46eae9508c70612540ffea655566866095fdfab4362"} Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.076670 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.076690 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.118663 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 
12:31:06.145321 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" podStartSLOduration=6.145306392 podStartE2EDuration="6.145306392s" podCreationTimestamp="2025-12-03 12:31:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:31:06.119834686 +0000 UTC m=+612.581682468" watchObservedRunningTime="2025-12-03 12:31:06.145306392 +0000 UTC m=+612.607154175" Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.568288 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p"] Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.568404 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.568980 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.580778 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7"] Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.580980 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.581525 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.601843 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-xtwpc"] Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.601941 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.602318 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.604551 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators_ffaeeb44-3be6-4407-9095-339c36ae6c58_0(ba4aae87596f0719e13430db5d97f7b5d5d244b5784f7b9082acb02a27d1e621): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.604591 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators_ffaeeb44-3be6-4407-9095-339c36ae6c58_0(ba4aae87596f0719e13430db5d97f7b5d5d244b5784f7b9082acb02a27d1e621): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.604607 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators_ffaeeb44-3be6-4407-9095-339c36ae6c58_0(ba4aae87596f0719e13430db5d97f7b5d5d244b5784f7b9082acb02a27d1e621): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.604659 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators(ffaeeb44-3be6-4407-9095-339c36ae6c58)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators(ffaeeb44-3be6-4407-9095-339c36ae6c58)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators_ffaeeb44-3be6-4407-9095-339c36ae6c58_0(ba4aae87596f0719e13430db5d97f7b5d5d244b5784f7b9082acb02a27d1e621): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" podUID="ffaeeb44-3be6-4407-9095-339c36ae6c58" Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.607355 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-4p9g9"] Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.607583 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.608169 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.612375 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz"] Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.612660 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:06 crc kubenswrapper[4849]: I1203 12:31:06.613086 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.617458 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators_4289cd35-0b12-4095-9c25-b071d4351d32_0(b692c8ac4878e7e56fd1f0d361ada8c27654c64f754023eaabfd15fc3e83aee8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.617517 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators_4289cd35-0b12-4095-9c25-b071d4351d32_0(b692c8ac4878e7e56fd1f0d361ada8c27654c64f754023eaabfd15fc3e83aee8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.617539 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators_4289cd35-0b12-4095-9c25-b071d4351d32_0(b692c8ac4878e7e56fd1f0d361ada8c27654c64f754023eaabfd15fc3e83aee8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.617573 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators(4289cd35-0b12-4095-9c25-b071d4351d32)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators(4289cd35-0b12-4095-9c25-b071d4351d32)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators_4289cd35-0b12-4095-9c25-b071d4351d32_0(b692c8ac4878e7e56fd1f0d361ada8c27654c64f754023eaabfd15fc3e83aee8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" podUID="4289cd35-0b12-4095-9c25-b071d4351d32" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.636310 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-xtwpc_openshift-operators_49c7f06b-878f-4814-8196-b4a91e2fbca7_0(1f1619a53577c1ccf8a821282f11d290b16b14a69b46bb8c401d430fab51d6ca): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.636363 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-xtwpc_openshift-operators_49c7f06b-878f-4814-8196-b4a91e2fbca7_0(1f1619a53577c1ccf8a821282f11d290b16b14a69b46bb8c401d430fab51d6ca): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.636384 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-xtwpc_openshift-operators_49c7f06b-878f-4814-8196-b4a91e2fbca7_0(1f1619a53577c1ccf8a821282f11d290b16b14a69b46bb8c401d430fab51d6ca): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.636425 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-xtwpc_openshift-operators(49c7f06b-878f-4814-8196-b4a91e2fbca7)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-xtwpc_openshift-operators(49c7f06b-878f-4814-8196-b4a91e2fbca7)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-xtwpc_openshift-operators_49c7f06b-878f-4814-8196-b4a91e2fbca7_0(1f1619a53577c1ccf8a821282f11d290b16b14a69b46bb8c401d430fab51d6ca): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" podUID="49c7f06b-878f-4814-8196-b4a91e2fbca7" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.652270 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-4p9g9_openshift-operators_64bb0224-aa3f-4b8a-854e-251422819440_0(d0b725ee8c06cf8f9108109fa8784f64e6dfaa8b51035daab8cabf65d72ab5a7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.652329 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-4p9g9_openshift-operators_64bb0224-aa3f-4b8a-854e-251422819440_0(d0b725ee8c06cf8f9108109fa8784f64e6dfaa8b51035daab8cabf65d72ab5a7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.652350 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-4p9g9_openshift-operators_64bb0224-aa3f-4b8a-854e-251422819440_0(d0b725ee8c06cf8f9108109fa8784f64e6dfaa8b51035daab8cabf65d72ab5a7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.652392 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-4p9g9_openshift-operators(64bb0224-aa3f-4b8a-854e-251422819440)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-4p9g9_openshift-operators(64bb0224-aa3f-4b8a-854e-251422819440)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-4p9g9_openshift-operators_64bb0224-aa3f-4b8a-854e-251422819440_0(d0b725ee8c06cf8f9108109fa8784f64e6dfaa8b51035daab8cabf65d72ab5a7): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" podUID="64bb0224-aa3f-4b8a-854e-251422819440" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.659262 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators_0af871d7-9f6d-49ba-97ad-d0d36cceed52_0(5e7bd28df65bb5b2bd2a8c822cfd9fa150a603e22e232ccf640ddb81d723f151): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.659324 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators_0af871d7-9f6d-49ba-97ad-d0d36cceed52_0(5e7bd28df65bb5b2bd2a8c822cfd9fa150a603e22e232ccf640ddb81d723f151): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.659346 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators_0af871d7-9f6d-49ba-97ad-d0d36cceed52_0(5e7bd28df65bb5b2bd2a8c822cfd9fa150a603e22e232ccf640ddb81d723f151): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:06 crc kubenswrapper[4849]: E1203 12:31:06.659395 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators(0af871d7-9f6d-49ba-97ad-d0d36cceed52)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators(0af871d7-9f6d-49ba-97ad-d0d36cceed52)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators_0af871d7-9f6d-49ba-97ad-d0d36cceed52_0(5e7bd28df65bb5b2bd2a8c822cfd9fa150a603e22e232ccf640ddb81d723f151): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" podUID="0af871d7-9f6d-49ba-97ad-d0d36cceed52" Dec 03 12:31:07 crc kubenswrapper[4849]: I1203 12:31:07.080920 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:07 crc kubenswrapper[4849]: I1203 12:31:07.105159 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:13 crc kubenswrapper[4849]: I1203 12:31:13.858834 4849 scope.go:117] "RemoveContainer" containerID="ad36464d1d23439200024871f7f6e4beba4ab7b967317c2f0c2373d670d27098" Dec 03 12:31:13 crc kubenswrapper[4849]: E1203 12:31:13.859344 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-2pjsx_openshift-multus(1b60c35d-f388-49eb-a5d8-09a6cc752575)\"" pod="openshift-multus/multus-2pjsx" podUID="1b60c35d-f388-49eb-a5d8-09a6cc752575" Dec 03 12:31:16 crc kubenswrapper[4849]: I1203 12:31:16.855539 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:16 crc kubenswrapper[4849]: I1203 12:31:16.856047 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:16 crc kubenswrapper[4849]: E1203 12:31:16.879089 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators_ffaeeb44-3be6-4407-9095-339c36ae6c58_0(c92b619f6b1c6a31f7648f842768c399ed850cdd2b70387b463be3dd90952212): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:31:16 crc kubenswrapper[4849]: E1203 12:31:16.879137 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators_ffaeeb44-3be6-4407-9095-339c36ae6c58_0(c92b619f6b1c6a31f7648f842768c399ed850cdd2b70387b463be3dd90952212): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:16 crc kubenswrapper[4849]: E1203 12:31:16.879156 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators_ffaeeb44-3be6-4407-9095-339c36ae6c58_0(c92b619f6b1c6a31f7648f842768c399ed850cdd2b70387b463be3dd90952212): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:16 crc kubenswrapper[4849]: E1203 12:31:16.879189 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators(ffaeeb44-3be6-4407-9095-339c36ae6c58)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators(ffaeeb44-3be6-4407-9095-339c36ae6c58)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_openshift-operators_ffaeeb44-3be6-4407-9095-339c36ae6c58_0(c92b619f6b1c6a31f7648f842768c399ed850cdd2b70387b463be3dd90952212): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" podUID="ffaeeb44-3be6-4407-9095-339c36ae6c58" Dec 03 12:31:18 crc kubenswrapper[4849]: I1203 12:31:18.857792 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:18 crc kubenswrapper[4849]: I1203 12:31:18.858865 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:18 crc kubenswrapper[4849]: I1203 12:31:18.859739 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:18 crc kubenswrapper[4849]: I1203 12:31:18.859830 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:18 crc kubenswrapper[4849]: E1203 12:31:18.884450 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators_4289cd35-0b12-4095-9c25-b071d4351d32_0(afd5d6c34d86936b8b0b0396efe8e2d9ded5f0cda1252e575f685be6d8f37ff2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:31:18 crc kubenswrapper[4849]: E1203 12:31:18.884556 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators_4289cd35-0b12-4095-9c25-b071d4351d32_0(afd5d6c34d86936b8b0b0396efe8e2d9ded5f0cda1252e575f685be6d8f37ff2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:18 crc kubenswrapper[4849]: E1203 12:31:18.884623 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators_4289cd35-0b12-4095-9c25-b071d4351d32_0(afd5d6c34d86936b8b0b0396efe8e2d9ded5f0cda1252e575f685be6d8f37ff2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:18 crc kubenswrapper[4849]: E1203 12:31:18.884749 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators(4289cd35-0b12-4095-9c25-b071d4351d32)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators(4289cd35-0b12-4095-9c25-b071d4351d32)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-7zxv7_openshift-operators_4289cd35-0b12-4095-9c25-b071d4351d32_0(afd5d6c34d86936b8b0b0396efe8e2d9ded5f0cda1252e575f685be6d8f37ff2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" podUID="4289cd35-0b12-4095-9c25-b071d4351d32" Dec 03 12:31:18 crc kubenswrapper[4849]: E1203 12:31:18.889165 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-4p9g9_openshift-operators_64bb0224-aa3f-4b8a-854e-251422819440_0(620370ea7824248951274175d1b4911b878ac2e4c7e7fedaf14509e796f76b25): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:31:18 crc kubenswrapper[4849]: E1203 12:31:18.889225 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-4p9g9_openshift-operators_64bb0224-aa3f-4b8a-854e-251422819440_0(620370ea7824248951274175d1b4911b878ac2e4c7e7fedaf14509e796f76b25): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:18 crc kubenswrapper[4849]: E1203 12:31:18.889244 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-4p9g9_openshift-operators_64bb0224-aa3f-4b8a-854e-251422819440_0(620370ea7824248951274175d1b4911b878ac2e4c7e7fedaf14509e796f76b25): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:18 crc kubenswrapper[4849]: E1203 12:31:18.889286 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-4p9g9_openshift-operators(64bb0224-aa3f-4b8a-854e-251422819440)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-4p9g9_openshift-operators(64bb0224-aa3f-4b8a-854e-251422819440)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-4p9g9_openshift-operators_64bb0224-aa3f-4b8a-854e-251422819440_0(620370ea7824248951274175d1b4911b878ac2e4c7e7fedaf14509e796f76b25): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" podUID="64bb0224-aa3f-4b8a-854e-251422819440" Dec 03 12:31:19 crc kubenswrapper[4849]: I1203 12:31:19.858318 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:19 crc kubenswrapper[4849]: I1203 12:31:19.858880 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:19 crc kubenswrapper[4849]: E1203 12:31:19.886929 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-xtwpc_openshift-operators_49c7f06b-878f-4814-8196-b4a91e2fbca7_0(74026d8fdd489750a894937fdf2d1755d99fdf44e6b51d8902c5241dccbe1191): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 12:31:19 crc kubenswrapper[4849]: E1203 12:31:19.886983 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-xtwpc_openshift-operators_49c7f06b-878f-4814-8196-b4a91e2fbca7_0(74026d8fdd489750a894937fdf2d1755d99fdf44e6b51d8902c5241dccbe1191): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:19 crc kubenswrapper[4849]: E1203 12:31:19.887002 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-xtwpc_openshift-operators_49c7f06b-878f-4814-8196-b4a91e2fbca7_0(74026d8fdd489750a894937fdf2d1755d99fdf44e6b51d8902c5241dccbe1191): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:19 crc kubenswrapper[4849]: E1203 12:31:19.887043 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-xtwpc_openshift-operators(49c7f06b-878f-4814-8196-b4a91e2fbca7)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-xtwpc_openshift-operators(49c7f06b-878f-4814-8196-b4a91e2fbca7)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-xtwpc_openshift-operators_49c7f06b-878f-4814-8196-b4a91e2fbca7_0(74026d8fdd489750a894937fdf2d1755d99fdf44e6b51d8902c5241dccbe1191): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" podUID="49c7f06b-878f-4814-8196-b4a91e2fbca7" Dec 03 12:31:21 crc kubenswrapper[4849]: I1203 12:31:21.856285 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:21 crc kubenswrapper[4849]: I1203 12:31:21.857184 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:21 crc kubenswrapper[4849]: E1203 12:31:21.879281 4849 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators_0af871d7-9f6d-49ba-97ad-d0d36cceed52_0(2bb60dcd24f46f687562f5a0c29babe294e39ef74ab54afa15fd17fc2e685d11): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 03 12:31:21 crc kubenswrapper[4849]: E1203 12:31:21.879331 4849 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators_0af871d7-9f6d-49ba-97ad-d0d36cceed52_0(2bb60dcd24f46f687562f5a0c29babe294e39ef74ab54afa15fd17fc2e685d11): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:21 crc kubenswrapper[4849]: E1203 12:31:21.879351 4849 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators_0af871d7-9f6d-49ba-97ad-d0d36cceed52_0(2bb60dcd24f46f687562f5a0c29babe294e39ef74ab54afa15fd17fc2e685d11): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:21 crc kubenswrapper[4849]: E1203 12:31:21.879396 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators(0af871d7-9f6d-49ba-97ad-d0d36cceed52)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators(0af871d7-9f6d-49ba-97ad-d0d36cceed52)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_openshift-operators_0af871d7-9f6d-49ba-97ad-d0d36cceed52_0(2bb60dcd24f46f687562f5a0c29babe294e39ef74ab54afa15fd17fc2e685d11): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" podUID="0af871d7-9f6d-49ba-97ad-d0d36cceed52" Dec 03 12:31:27 crc kubenswrapper[4849]: I1203 12:31:27.856973 4849 scope.go:117] "RemoveContainer" containerID="ad36464d1d23439200024871f7f6e4beba4ab7b967317c2f0c2373d670d27098" Dec 03 12:31:28 crc kubenswrapper[4849]: I1203 12:31:28.165363 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-2pjsx_1b60c35d-f388-49eb-a5d8-09a6cc752575/kube-multus/2.log" Dec 03 12:31:28 crc kubenswrapper[4849]: I1203 12:31:28.165413 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-2pjsx" event={"ID":"1b60c35d-f388-49eb-a5d8-09a6cc752575","Type":"ContainerStarted","Data":"5a809215074ea4a8732a81796d4d41bdeb2c09cd135b898f42ca3ec0fbe9694e"} Dec 03 12:31:30 crc kubenswrapper[4849]: I1203 12:31:30.526736 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w447b" Dec 03 12:31:31 crc kubenswrapper[4849]: I1203 12:31:31.856284 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:31 crc kubenswrapper[4849]: I1203 12:31:31.856919 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" Dec 03 12:31:32 crc kubenswrapper[4849]: I1203 12:31:32.192342 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p"] Dec 03 12:31:32 crc kubenswrapper[4849]: W1203 12:31:32.201144 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podffaeeb44_3be6_4407_9095_339c36ae6c58.slice/crio-2ef2fa7ea55267e02113b8c98f3ffed76059f807aee5bf9dd890716b1ac6bf05 WatchSource:0}: Error finding container 2ef2fa7ea55267e02113b8c98f3ffed76059f807aee5bf9dd890716b1ac6bf05: Status 404 returned error can't find the container with id 2ef2fa7ea55267e02113b8c98f3ffed76059f807aee5bf9dd890716b1ac6bf05 Dec 03 12:31:32 crc kubenswrapper[4849]: I1203 12:31:32.856062 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:32 crc kubenswrapper[4849]: I1203 12:31:32.856475 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" Dec 03 12:31:33 crc kubenswrapper[4849]: I1203 12:31:33.190869 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" event={"ID":"ffaeeb44-3be6-4407-9095-339c36ae6c58","Type":"ContainerStarted","Data":"2ef2fa7ea55267e02113b8c98f3ffed76059f807aee5bf9dd890716b1ac6bf05"} Dec 03 12:31:33 crc kubenswrapper[4849]: I1203 12:31:33.195453 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7"] Dec 03 12:31:33 crc kubenswrapper[4849]: W1203 12:31:33.199328 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4289cd35_0b12_4095_9c25_b071d4351d32.slice/crio-788cc1ae83125d954ac775e3321a744dceeb019e93738b4922141174eb63728a WatchSource:0}: Error finding container 788cc1ae83125d954ac775e3321a744dceeb019e93738b4922141174eb63728a: Status 404 returned error can't find the container with id 788cc1ae83125d954ac775e3321a744dceeb019e93738b4922141174eb63728a Dec 03 12:31:33 crc kubenswrapper[4849]: I1203 12:31:33.855666 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:33 crc kubenswrapper[4849]: I1203 12:31:33.860167 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:34 crc kubenswrapper[4849]: I1203 12:31:34.209686 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" event={"ID":"4289cd35-0b12-4095-9c25-b071d4351d32","Type":"ContainerStarted","Data":"788cc1ae83125d954ac775e3321a744dceeb019e93738b4922141174eb63728a"} Dec 03 12:31:34 crc kubenswrapper[4849]: I1203 12:31:34.243397 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-xtwpc"] Dec 03 12:31:34 crc kubenswrapper[4849]: W1203 12:31:34.256764 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49c7f06b_878f_4814_8196_b4a91e2fbca7.slice/crio-403c17d8a6701c4b596bd56afac0d8b254d0767a5061f71bb79a197ce712b5a8 WatchSource:0}: Error finding container 403c17d8a6701c4b596bd56afac0d8b254d0767a5061f71bb79a197ce712b5a8: Status 404 returned error can't find the container with id 403c17d8a6701c4b596bd56afac0d8b254d0767a5061f71bb79a197ce712b5a8 Dec 03 12:31:34 crc kubenswrapper[4849]: I1203 12:31:34.863409 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:34 crc kubenswrapper[4849]: I1203 12:31:34.863549 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:34 crc kubenswrapper[4849]: I1203 12:31:34.863849 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" Dec 03 12:31:34 crc kubenswrapper[4849]: I1203 12:31:34.864022 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:35 crc kubenswrapper[4849]: I1203 12:31:35.215016 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" event={"ID":"49c7f06b-878f-4814-8196-b4a91e2fbca7","Type":"ContainerStarted","Data":"403c17d8a6701c4b596bd56afac0d8b254d0767a5061f71bb79a197ce712b5a8"} Dec 03 12:31:36 crc kubenswrapper[4849]: I1203 12:31:36.621344 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-4p9g9"] Dec 03 12:31:36 crc kubenswrapper[4849]: I1203 12:31:36.649666 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz"] Dec 03 12:31:36 crc kubenswrapper[4849]: W1203 12:31:36.661163 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0af871d7_9f6d_49ba_97ad_d0d36cceed52.slice/crio-304069e52318b61b8be74b0930520c52bf49298115649ee1a419be0a9a118f7f WatchSource:0}: Error finding container 304069e52318b61b8be74b0930520c52bf49298115649ee1a419be0a9a118f7f: Status 404 returned error can't find the container with id 304069e52318b61b8be74b0930520c52bf49298115649ee1a419be0a9a118f7f Dec 03 12:31:37 crc kubenswrapper[4849]: I1203 12:31:37.225221 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" event={"ID":"ffaeeb44-3be6-4407-9095-339c36ae6c58","Type":"ContainerStarted","Data":"d66560204057b54d685ebbd67e0deeac33d98b287800370fbd97c43a52d75e3e"} Dec 03 12:31:37 crc kubenswrapper[4849]: I1203 12:31:37.227228 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" event={"ID":"0af871d7-9f6d-49ba-97ad-d0d36cceed52","Type":"ContainerStarted","Data":"e8e5374382946e4c9f402b2a2e12eb1a7579682cfc0a7de94953119566480e6b"} Dec 03 12:31:37 crc kubenswrapper[4849]: I1203 12:31:37.227267 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" event={"ID":"0af871d7-9f6d-49ba-97ad-d0d36cceed52","Type":"ContainerStarted","Data":"304069e52318b61b8be74b0930520c52bf49298115649ee1a419be0a9a118f7f"} Dec 03 12:31:37 crc kubenswrapper[4849]: I1203 12:31:37.228207 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" event={"ID":"64bb0224-aa3f-4b8a-854e-251422819440","Type":"ContainerStarted","Data":"9a6efb4e8c3c9040877a288589df150cc50bff4ae0ed7ccccfb542f0e6b5a840"} Dec 03 12:31:37 crc kubenswrapper[4849]: I1203 12:31:37.259544 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p" podStartSLOduration=32.164162558 podStartE2EDuration="36.25952904s" podCreationTimestamp="2025-12-03 12:31:01 +0000 UTC" firstStartedPulling="2025-12-03 12:31:32.204156155 +0000 UTC m=+638.666003937" lastFinishedPulling="2025-12-03 12:31:36.299522636 +0000 UTC m=+642.761370419" observedRunningTime="2025-12-03 12:31:37.238136905 +0000 UTC m=+643.699984688" watchObservedRunningTime="2025-12-03 12:31:37.25952904 +0000 UTC m=+643.721376822" Dec 03 12:31:38 crc kubenswrapper[4849]: I1203 12:31:38.233878 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" event={"ID":"4289cd35-0b12-4095-9c25-b071d4351d32","Type":"ContainerStarted","Data":"e93215281a5bb881afb711d20bd7e02aa8abc8101ac1183cd9323a94e306fdc7"} Dec 03 12:31:38 crc kubenswrapper[4849]: I1203 12:31:38.237285 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" event={"ID":"49c7f06b-878f-4814-8196-b4a91e2fbca7","Type":"ContainerStarted","Data":"e627fb3f46d1958532998057ada9f2c55e59c1e6ad7bcea3aa678e24135c6b0e"} Dec 03 12:31:38 crc kubenswrapper[4849]: I1203 12:31:38.248962 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-7zxv7" podStartSLOduration=32.695505423 podStartE2EDuration="37.248947859s" podCreationTimestamp="2025-12-03 12:31:01 +0000 UTC" firstStartedPulling="2025-12-03 12:31:33.202041966 +0000 UTC m=+639.663889749" lastFinishedPulling="2025-12-03 12:31:37.755484402 +0000 UTC m=+644.217332185" observedRunningTime="2025-12-03 12:31:38.247667352 +0000 UTC m=+644.709515135" watchObservedRunningTime="2025-12-03 12:31:38.248947859 +0000 UTC m=+644.710795643" Dec 03 12:31:38 crc kubenswrapper[4849]: I1203 12:31:38.252047 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-84db7f5694-67hvz" podStartSLOduration=37.252036677 podStartE2EDuration="37.252036677s" podCreationTimestamp="2025-12-03 12:31:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:31:37.263383648 +0000 UTC m=+643.725231431" watchObservedRunningTime="2025-12-03 12:31:38.252036677 +0000 UTC m=+644.713884461" Dec 03 12:31:38 crc kubenswrapper[4849]: I1203 12:31:38.269120 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" podStartSLOduration=33.7737194 podStartE2EDuration="37.269103048s" podCreationTimestamp="2025-12-03 12:31:01 +0000 UTC" firstStartedPulling="2025-12-03 12:31:34.258214118 +0000 UTC m=+640.720061900" lastFinishedPulling="2025-12-03 12:31:37.753597765 +0000 UTC m=+644.215445548" observedRunningTime="2025-12-03 12:31:38.26242899 +0000 UTC m=+644.724276772" watchObservedRunningTime="2025-12-03 12:31:38.269103048 +0000 UTC m=+644.730950832" Dec 03 12:31:39 crc kubenswrapper[4849]: I1203 12:31:39.242131 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:41 crc kubenswrapper[4849]: I1203 12:31:41.255251 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" event={"ID":"64bb0224-aa3f-4b8a-854e-251422819440","Type":"ContainerStarted","Data":"2655bef0ba9442f5b29727776020641505af5e12219d98b02e12f6ab0599da3b"} Dec 03 12:31:41 crc kubenswrapper[4849]: I1203 12:31:41.255466 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:41 crc kubenswrapper[4849]: I1203 12:31:41.270516 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" podStartSLOduration=36.398988986 podStartE2EDuration="40.270504053s" podCreationTimestamp="2025-12-03 12:31:01 +0000 UTC" firstStartedPulling="2025-12-03 12:31:36.627523862 +0000 UTC 
m=+643.089371645" lastFinishedPulling="2025-12-03 12:31:40.499038928 +0000 UTC m=+646.960886712" observedRunningTime="2025-12-03 12:31:41.26850834 +0000 UTC m=+647.730356123" watchObservedRunningTime="2025-12-03 12:31:41.270504053 +0000 UTC m=+647.732351836" Dec 03 12:31:41 crc kubenswrapper[4849]: I1203 12:31:41.281829 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-4p9g9" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.433489 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-26qw2"] Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.434530 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-26qw2" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.435930 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.436088 4849 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-4kjxb" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.436513 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.445108 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-26qw2"] Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.448555 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-d8cfb"] Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.450542 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-d8cfb" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.452332 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrsrw\" (UniqueName: \"kubernetes.io/projected/a6a7a713-4a04-49fc-98b2-9c59610fa61b-kube-api-access-rrsrw\") pod \"cert-manager-cainjector-7f985d654d-26qw2\" (UID: \"a6a7a713-4a04-49fc-98b2-9c59610fa61b\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-26qw2" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.452431 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlkkx\" (UniqueName: \"kubernetes.io/projected/3f221c87-0071-43b0-986e-425c3d54a75a-kube-api-access-xlkkx\") pod \"cert-manager-5b446d88c5-d8cfb\" (UID: \"3f221c87-0071-43b0-986e-425c3d54a75a\") " pod="cert-manager/cert-manager-5b446d88c5-d8cfb" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.452977 4849 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-wbdnm" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.463061 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-mg8hm"] Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.463857 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-mg8hm" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.466764 4849 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-p97l5" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.469089 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-d8cfb"] Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.472972 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-mg8hm"] Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.554235 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gsqw\" (UniqueName: \"kubernetes.io/projected/236de675-5b13-478e-95d2-0f6da1047034-kube-api-access-4gsqw\") pod \"cert-manager-webhook-5655c58dd6-mg8hm\" (UID: \"236de675-5b13-478e-95d2-0f6da1047034\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-mg8hm" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.554339 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrsrw\" (UniqueName: \"kubernetes.io/projected/a6a7a713-4a04-49fc-98b2-9c59610fa61b-kube-api-access-rrsrw\") pod \"cert-manager-cainjector-7f985d654d-26qw2\" (UID: \"a6a7a713-4a04-49fc-98b2-9c59610fa61b\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-26qw2" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.554429 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlkkx\" (UniqueName: \"kubernetes.io/projected/3f221c87-0071-43b0-986e-425c3d54a75a-kube-api-access-xlkkx\") pod \"cert-manager-5b446d88c5-d8cfb\" (UID: \"3f221c87-0071-43b0-986e-425c3d54a75a\") " pod="cert-manager/cert-manager-5b446d88c5-d8cfb" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.569228 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlkkx\" (UniqueName: \"kubernetes.io/projected/3f221c87-0071-43b0-986e-425c3d54a75a-kube-api-access-xlkkx\") pod \"cert-manager-5b446d88c5-d8cfb\" (UID: \"3f221c87-0071-43b0-986e-425c3d54a75a\") " pod="cert-manager/cert-manager-5b446d88c5-d8cfb" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.569667 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrsrw\" (UniqueName: \"kubernetes.io/projected/a6a7a713-4a04-49fc-98b2-9c59610fa61b-kube-api-access-rrsrw\") pod \"cert-manager-cainjector-7f985d654d-26qw2\" (UID: \"a6a7a713-4a04-49fc-98b2-9c59610fa61b\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-26qw2" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.654922 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gsqw\" (UniqueName: \"kubernetes.io/projected/236de675-5b13-478e-95d2-0f6da1047034-kube-api-access-4gsqw\") pod \"cert-manager-webhook-5655c58dd6-mg8hm\" (UID: \"236de675-5b13-478e-95d2-0f6da1047034\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-mg8hm" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.669764 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gsqw\" (UniqueName: \"kubernetes.io/projected/236de675-5b13-478e-95d2-0f6da1047034-kube-api-access-4gsqw\") pod \"cert-manager-webhook-5655c58dd6-mg8hm\" (UID: \"236de675-5b13-478e-95d2-0f6da1047034\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-mg8hm" 
Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.747517 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-26qw2" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.761120 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-d8cfb" Dec 03 12:31:49 crc kubenswrapper[4849]: I1203 12:31:49.775204 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-mg8hm" Dec 03 12:31:50 crc kubenswrapper[4849]: I1203 12:31:50.102822 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-26qw2"] Dec 03 12:31:50 crc kubenswrapper[4849]: W1203 12:31:50.105087 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6a7a713_4a04_49fc_98b2_9c59610fa61b.slice/crio-4a9c6ec587388ed507a2a8eadf806168ab9c3f7a19797f28bfad719e3e9d4911 WatchSource:0}: Error finding container 4a9c6ec587388ed507a2a8eadf806168ab9c3f7a19797f28bfad719e3e9d4911: Status 404 returned error can't find the container with id 4a9c6ec587388ed507a2a8eadf806168ab9c3f7a19797f28bfad719e3e9d4911 Dec 03 12:31:50 crc kubenswrapper[4849]: I1203 12:31:50.136179 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-d8cfb"] Dec 03 12:31:50 crc kubenswrapper[4849]: W1203 12:31:50.137365 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f221c87_0071_43b0_986e_425c3d54a75a.slice/crio-39916d5140d8185db07306f0a6c3b8b06106d84c44c5541403ec58baec20a694 WatchSource:0}: Error finding container 39916d5140d8185db07306f0a6c3b8b06106d84c44c5541403ec58baec20a694: Status 404 returned error can't find the container with id 39916d5140d8185db07306f0a6c3b8b06106d84c44c5541403ec58baec20a694 Dec 03 12:31:50 crc kubenswrapper[4849]: I1203 12:31:50.173576 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-mg8hm"] Dec 03 12:31:50 crc kubenswrapper[4849]: W1203 12:31:50.176336 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod236de675_5b13_478e_95d2_0f6da1047034.slice/crio-ed8fa36f96ecdf721b40d35a548aad179e7a9abd24abb8a179c07e265cd2e362 WatchSource:0}: Error finding container ed8fa36f96ecdf721b40d35a548aad179e7a9abd24abb8a179c07e265cd2e362: Status 404 returned error can't find the container with id ed8fa36f96ecdf721b40d35a548aad179e7a9abd24abb8a179c07e265cd2e362 Dec 03 12:31:50 crc kubenswrapper[4849]: I1203 12:31:50.292324 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-26qw2" event={"ID":"a6a7a713-4a04-49fc-98b2-9c59610fa61b","Type":"ContainerStarted","Data":"4a9c6ec587388ed507a2a8eadf806168ab9c3f7a19797f28bfad719e3e9d4911"} Dec 03 12:31:50 crc kubenswrapper[4849]: I1203 12:31:50.293471 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-d8cfb" event={"ID":"3f221c87-0071-43b0-986e-425c3d54a75a","Type":"ContainerStarted","Data":"39916d5140d8185db07306f0a6c3b8b06106d84c44c5541403ec58baec20a694"} Dec 03 12:31:50 crc kubenswrapper[4849]: I1203 12:31:50.294211 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-mg8hm" 
event={"ID":"236de675-5b13-478e-95d2-0f6da1047034","Type":"ContainerStarted","Data":"ed8fa36f96ecdf721b40d35a548aad179e7a9abd24abb8a179c07e265cd2e362"} Dec 03 12:31:51 crc kubenswrapper[4849]: I1203 12:31:51.768816 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-xtwpc" Dec 03 12:31:53 crc kubenswrapper[4849]: I1203 12:31:53.309957 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-mg8hm" event={"ID":"236de675-5b13-478e-95d2-0f6da1047034","Type":"ContainerStarted","Data":"7896d0677c058ba19e5472a2111a9927c9b72a6122d9783bdebd00b944da6893"} Dec 03 12:31:53 crc kubenswrapper[4849]: I1203 12:31:53.310174 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-mg8hm" Dec 03 12:31:53 crc kubenswrapper[4849]: I1203 12:31:53.311266 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-26qw2" event={"ID":"a6a7a713-4a04-49fc-98b2-9c59610fa61b","Type":"ContainerStarted","Data":"51bc60cf071ec42c4dd17ce8882839bba272f8d4e1b92284db30cd9b72adb438"} Dec 03 12:31:53 crc kubenswrapper[4849]: I1203 12:31:53.314090 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-d8cfb" event={"ID":"3f221c87-0071-43b0-986e-425c3d54a75a","Type":"ContainerStarted","Data":"8f175069aa9242ef0bad826bd9aec4f27d918bde65b085888ef07b996930ed13"} Dec 03 12:31:53 crc kubenswrapper[4849]: I1203 12:31:53.325833 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-mg8hm" podStartSLOduration=1.784830039 podStartE2EDuration="4.325820398s" podCreationTimestamp="2025-12-03 12:31:49 +0000 UTC" firstStartedPulling="2025-12-03 12:31:50.178147207 +0000 UTC m=+656.639994990" lastFinishedPulling="2025-12-03 12:31:52.719137566 +0000 UTC m=+659.180985349" observedRunningTime="2025-12-03 12:31:53.321440151 +0000 UTC m=+659.783287924" watchObservedRunningTime="2025-12-03 12:31:53.325820398 +0000 UTC m=+659.787668181" Dec 03 12:31:53 crc kubenswrapper[4849]: I1203 12:31:53.336464 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-26qw2" podStartSLOduration=1.743858817 podStartE2EDuration="4.336449584s" podCreationTimestamp="2025-12-03 12:31:49 +0000 UTC" firstStartedPulling="2025-12-03 12:31:50.106895967 +0000 UTC m=+656.568743750" lastFinishedPulling="2025-12-03 12:31:52.699486735 +0000 UTC m=+659.161334517" observedRunningTime="2025-12-03 12:31:53.333131155 +0000 UTC m=+659.794978938" watchObservedRunningTime="2025-12-03 12:31:53.336449584 +0000 UTC m=+659.798297367" Dec 03 12:31:59 crc kubenswrapper[4849]: I1203 12:31:59.777913 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-mg8hm" Dec 03 12:31:59 crc kubenswrapper[4849]: I1203 12:31:59.789274 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-d8cfb" podStartSLOduration=8.22104176 podStartE2EDuration="10.789260129s" podCreationTimestamp="2025-12-03 12:31:49 +0000 UTC" firstStartedPulling="2025-12-03 12:31:50.139047364 +0000 UTC m=+656.600895147" lastFinishedPulling="2025-12-03 12:31:52.707265732 +0000 UTC m=+659.169113516" observedRunningTime="2025-12-03 12:31:53.344407057 +0000 UTC m=+659.806254840" 
watchObservedRunningTime="2025-12-03 12:31:59.789260129 +0000 UTC m=+666.251107912" Dec 03 12:32:17 crc kubenswrapper[4849]: I1203 12:32:17.989624 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x"] Dec 03 12:32:17 crc kubenswrapper[4849]: I1203 12:32:17.991770 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" Dec 03 12:32:17 crc kubenswrapper[4849]: I1203 12:32:17.994139 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 12:32:17 crc kubenswrapper[4849]: I1203 12:32:17.997407 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x"] Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.185739 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nll7\" (UniqueName: \"kubernetes.io/projected/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-kube-api-access-4nll7\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x\" (UID: \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.185811 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x\" (UID: \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.186010 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x\" (UID: \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.287214 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x\" (UID: \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.287362 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x\" (UID: \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.287424 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nll7\" (UniqueName: \"kubernetes.io/projected/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-kube-api-access-4nll7\") pod 
\"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x\" (UID: \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.287687 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-util\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x\" (UID: \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.287840 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-bundle\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x\" (UID: \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.302150 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nll7\" (UniqueName: \"kubernetes.io/projected/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-kube-api-access-4nll7\") pod \"a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x\" (UID: \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\") " pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.304927 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.400584 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx"] Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.403359 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.408872 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx"] Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.590999 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/931dd621-537b-4ff8-96c6-f78015b8c33f-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx\" (UID: \"931dd621-537b-4ff8-96c6-f78015b8c33f\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.591058 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29jk2\" (UniqueName: \"kubernetes.io/projected/931dd621-537b-4ff8-96c6-f78015b8c33f-kube-api-access-29jk2\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx\" (UID: \"931dd621-537b-4ff8-96c6-f78015b8c33f\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.591209 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/931dd621-537b-4ff8-96c6-f78015b8c33f-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx\" (UID: \"931dd621-537b-4ff8-96c6-f78015b8c33f\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.667268 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x"] Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.692149 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/931dd621-537b-4ff8-96c6-f78015b8c33f-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx\" (UID: \"931dd621-537b-4ff8-96c6-f78015b8c33f\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.692518 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29jk2\" (UniqueName: \"kubernetes.io/projected/931dd621-537b-4ff8-96c6-f78015b8c33f-kube-api-access-29jk2\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx\" (UID: \"931dd621-537b-4ff8-96c6-f78015b8c33f\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.692551 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/931dd621-537b-4ff8-96c6-f78015b8c33f-bundle\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx\" (UID: \"931dd621-537b-4ff8-96c6-f78015b8c33f\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.692630 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/931dd621-537b-4ff8-96c6-f78015b8c33f-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx\" (UID: \"931dd621-537b-4ff8-96c6-f78015b8c33f\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.692964 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/931dd621-537b-4ff8-96c6-f78015b8c33f-util\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx\" (UID: \"931dd621-537b-4ff8-96c6-f78015b8c33f\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.706969 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29jk2\" (UniqueName: \"kubernetes.io/projected/931dd621-537b-4ff8-96c6-f78015b8c33f-kube-api-access-29jk2\") pod \"4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx\" (UID: \"931dd621-537b-4ff8-96c6-f78015b8c33f\") " pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.723532 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" Dec 03 12:32:18 crc kubenswrapper[4849]: I1203 12:32:18.913897 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx"] Dec 03 12:32:19 crc kubenswrapper[4849]: I1203 12:32:19.443061 4849 generic.go:334] "Generic (PLEG): container finished" podID="3bf60e1a-b6a1-430a-bf47-776b86bf7c90" containerID="ac92fbe20284fe0db1c7fed40e607aa8ad42f6d1e1714d3121632c6edcc654e1" exitCode=0 Dec 03 12:32:19 crc kubenswrapper[4849]: I1203 12:32:19.443120 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" event={"ID":"3bf60e1a-b6a1-430a-bf47-776b86bf7c90","Type":"ContainerDied","Data":"ac92fbe20284fe0db1c7fed40e607aa8ad42f6d1e1714d3121632c6edcc654e1"} Dec 03 12:32:19 crc kubenswrapper[4849]: I1203 12:32:19.443295 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" event={"ID":"3bf60e1a-b6a1-430a-bf47-776b86bf7c90","Type":"ContainerStarted","Data":"5f27b3149bd3fad835989c0f194efb6dc2afd5409819b87d98a730dbf06af8db"} Dec 03 12:32:19 crc kubenswrapper[4849]: I1203 12:32:19.444593 4849 generic.go:334] "Generic (PLEG): container finished" podID="931dd621-537b-4ff8-96c6-f78015b8c33f" containerID="c06c0e602bd004491e2f18454cfca74dc2884d2a343a39d459cdbf4cfb139e84" exitCode=0 Dec 03 12:32:19 crc kubenswrapper[4849]: I1203 12:32:19.444614 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" event={"ID":"931dd621-537b-4ff8-96c6-f78015b8c33f","Type":"ContainerDied","Data":"c06c0e602bd004491e2f18454cfca74dc2884d2a343a39d459cdbf4cfb139e84"} Dec 03 12:32:19 crc kubenswrapper[4849]: I1203 12:32:19.444638 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" 
event={"ID":"931dd621-537b-4ff8-96c6-f78015b8c33f","Type":"ContainerStarted","Data":"446fdcc5c0a8104ac5230701b9d1a2385981ad95ca56f8fd271fd0ec321e78a0"} Dec 03 12:32:21 crc kubenswrapper[4849]: I1203 12:32:21.454299 4849 generic.go:334] "Generic (PLEG): container finished" podID="3bf60e1a-b6a1-430a-bf47-776b86bf7c90" containerID="c23e9e7524e19cb259cddbfefcef40d9f19384d5ce0b4887de65a19d567cfca0" exitCode=0 Dec 03 12:32:21 crc kubenswrapper[4849]: I1203 12:32:21.454395 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" event={"ID":"3bf60e1a-b6a1-430a-bf47-776b86bf7c90","Type":"ContainerDied","Data":"c23e9e7524e19cb259cddbfefcef40d9f19384d5ce0b4887de65a19d567cfca0"} Dec 03 12:32:22 crc kubenswrapper[4849]: I1203 12:32:22.460462 4849 generic.go:334] "Generic (PLEG): container finished" podID="3bf60e1a-b6a1-430a-bf47-776b86bf7c90" containerID="ae0a7c14184b798f816b98ee4155eb8f736b4f604cc9b4f6a8659699a6b7bc35" exitCode=0 Dec 03 12:32:22 crc kubenswrapper[4849]: I1203 12:32:22.460504 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" event={"ID":"3bf60e1a-b6a1-430a-bf47-776b86bf7c90","Type":"ContainerDied","Data":"ae0a7c14184b798f816b98ee4155eb8f736b4f604cc9b4f6a8659699a6b7bc35"} Dec 03 12:32:22 crc kubenswrapper[4849]: I1203 12:32:22.677146 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:32:22 crc kubenswrapper[4849]: I1203 12:32:22.677199 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:32:23 crc kubenswrapper[4849]: I1203 12:32:23.465708 4849 generic.go:334] "Generic (PLEG): container finished" podID="931dd621-537b-4ff8-96c6-f78015b8c33f" containerID="70b5733f33f378cde8b338fc022f59422dda3670d2b2e014bdccc4450e47cfe7" exitCode=0 Dec 03 12:32:23 crc kubenswrapper[4849]: I1203 12:32:23.465781 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" event={"ID":"931dd621-537b-4ff8-96c6-f78015b8c33f","Type":"ContainerDied","Data":"70b5733f33f378cde8b338fc022f59422dda3670d2b2e014bdccc4450e47cfe7"} Dec 03 12:32:23 crc kubenswrapper[4849]: I1203 12:32:23.711658 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" Dec 03 12:32:23 crc kubenswrapper[4849]: I1203 12:32:23.851566 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-util\") pod \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\" (UID: \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\") " Dec 03 12:32:23 crc kubenswrapper[4849]: I1203 12:32:23.851687 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nll7\" (UniqueName: \"kubernetes.io/projected/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-kube-api-access-4nll7\") pod \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\" (UID: \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\") " Dec 03 12:32:23 crc kubenswrapper[4849]: I1203 12:32:23.851710 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-bundle\") pod \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\" (UID: \"3bf60e1a-b6a1-430a-bf47-776b86bf7c90\") " Dec 03 12:32:23 crc kubenswrapper[4849]: I1203 12:32:23.852504 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-bundle" (OuterVolumeSpecName: "bundle") pod "3bf60e1a-b6a1-430a-bf47-776b86bf7c90" (UID: "3bf60e1a-b6a1-430a-bf47-776b86bf7c90"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:32:23 crc kubenswrapper[4849]: I1203 12:32:23.856448 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-kube-api-access-4nll7" (OuterVolumeSpecName: "kube-api-access-4nll7") pod "3bf60e1a-b6a1-430a-bf47-776b86bf7c90" (UID: "3bf60e1a-b6a1-430a-bf47-776b86bf7c90"). InnerVolumeSpecName "kube-api-access-4nll7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:32:23 crc kubenswrapper[4849]: I1203 12:32:23.861589 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-util" (OuterVolumeSpecName: "util") pod "3bf60e1a-b6a1-430a-bf47-776b86bf7c90" (UID: "3bf60e1a-b6a1-430a-bf47-776b86bf7c90"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:32:23 crc kubenswrapper[4849]: I1203 12:32:23.953484 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nll7\" (UniqueName: \"kubernetes.io/projected/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-kube-api-access-4nll7\") on node \"crc\" DevicePath \"\"" Dec 03 12:32:23 crc kubenswrapper[4849]: I1203 12:32:23.953520 4849 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:32:23 crc kubenswrapper[4849]: I1203 12:32:23.953529 4849 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3bf60e1a-b6a1-430a-bf47-776b86bf7c90-util\") on node \"crc\" DevicePath \"\"" Dec 03 12:32:24 crc kubenswrapper[4849]: I1203 12:32:24.472126 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" event={"ID":"3bf60e1a-b6a1-430a-bf47-776b86bf7c90","Type":"ContainerDied","Data":"5f27b3149bd3fad835989c0f194efb6dc2afd5409819b87d98a730dbf06af8db"} Dec 03 12:32:24 crc kubenswrapper[4849]: I1203 12:32:24.472167 4849 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f27b3149bd3fad835989c0f194efb6dc2afd5409819b87d98a730dbf06af8db" Dec 03 12:32:24 crc kubenswrapper[4849]: I1203 12:32:24.472135 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x" Dec 03 12:32:24 crc kubenswrapper[4849]: I1203 12:32:24.473740 4849 generic.go:334] "Generic (PLEG): container finished" podID="931dd621-537b-4ff8-96c6-f78015b8c33f" containerID="2802367b2840ef14cab736053e4b3c1704f88138f7d0fd1d2ea77f48c76f341c" exitCode=0 Dec 03 12:32:24 crc kubenswrapper[4849]: I1203 12:32:24.473775 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" event={"ID":"931dd621-537b-4ff8-96c6-f78015b8c33f","Type":"ContainerDied","Data":"2802367b2840ef14cab736053e4b3c1704f88138f7d0fd1d2ea77f48c76f341c"} Dec 03 12:32:25 crc kubenswrapper[4849]: I1203 12:32:25.638139 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" Dec 03 12:32:25 crc kubenswrapper[4849]: I1203 12:32:25.774307 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/931dd621-537b-4ff8-96c6-f78015b8c33f-util\") pod \"931dd621-537b-4ff8-96c6-f78015b8c33f\" (UID: \"931dd621-537b-4ff8-96c6-f78015b8c33f\") " Dec 03 12:32:25 crc kubenswrapper[4849]: I1203 12:32:25.774423 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/931dd621-537b-4ff8-96c6-f78015b8c33f-bundle\") pod \"931dd621-537b-4ff8-96c6-f78015b8c33f\" (UID: \"931dd621-537b-4ff8-96c6-f78015b8c33f\") " Dec 03 12:32:25 crc kubenswrapper[4849]: I1203 12:32:25.774469 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-29jk2\" (UniqueName: \"kubernetes.io/projected/931dd621-537b-4ff8-96c6-f78015b8c33f-kube-api-access-29jk2\") pod \"931dd621-537b-4ff8-96c6-f78015b8c33f\" (UID: \"931dd621-537b-4ff8-96c6-f78015b8c33f\") " Dec 03 12:32:25 crc kubenswrapper[4849]: I1203 12:32:25.775226 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/931dd621-537b-4ff8-96c6-f78015b8c33f-bundle" (OuterVolumeSpecName: "bundle") pod "931dd621-537b-4ff8-96c6-f78015b8c33f" (UID: "931dd621-537b-4ff8-96c6-f78015b8c33f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:32:25 crc kubenswrapper[4849]: I1203 12:32:25.777383 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/931dd621-537b-4ff8-96c6-f78015b8c33f-kube-api-access-29jk2" (OuterVolumeSpecName: "kube-api-access-29jk2") pod "931dd621-537b-4ff8-96c6-f78015b8c33f" (UID: "931dd621-537b-4ff8-96c6-f78015b8c33f"). InnerVolumeSpecName "kube-api-access-29jk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:32:25 crc kubenswrapper[4849]: I1203 12:32:25.781706 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/931dd621-537b-4ff8-96c6-f78015b8c33f-util" (OuterVolumeSpecName: "util") pod "931dd621-537b-4ff8-96c6-f78015b8c33f" (UID: "931dd621-537b-4ff8-96c6-f78015b8c33f"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:32:25 crc kubenswrapper[4849]: I1203 12:32:25.876510 4849 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/931dd621-537b-4ff8-96c6-f78015b8c33f-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:32:25 crc kubenswrapper[4849]: I1203 12:32:25.876541 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-29jk2\" (UniqueName: \"kubernetes.io/projected/931dd621-537b-4ff8-96c6-f78015b8c33f-kube-api-access-29jk2\") on node \"crc\" DevicePath \"\"" Dec 03 12:32:25 crc kubenswrapper[4849]: I1203 12:32:25.876553 4849 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/931dd621-537b-4ff8-96c6-f78015b8c33f-util\") on node \"crc\" DevicePath \"\"" Dec 03 12:32:26 crc kubenswrapper[4849]: I1203 12:32:26.484665 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" event={"ID":"931dd621-537b-4ff8-96c6-f78015b8c33f","Type":"ContainerDied","Data":"446fdcc5c0a8104ac5230701b9d1a2385981ad95ca56f8fd271fd0ec321e78a0"} Dec 03 12:32:26 crc kubenswrapper[4849]: I1203 12:32:26.484697 4849 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="446fdcc5c0a8104ac5230701b9d1a2385981ad95ca56f8fd271fd0ec321e78a0" Dec 03 12:32:26 crc kubenswrapper[4849]: I1203 12:32:26.484762 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.930220 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2"] Dec 03 12:32:33 crc kubenswrapper[4849]: E1203 12:32:33.930765 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bf60e1a-b6a1-430a-bf47-776b86bf7c90" containerName="util" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.930778 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bf60e1a-b6a1-430a-bf47-776b86bf7c90" containerName="util" Dec 03 12:32:33 crc kubenswrapper[4849]: E1203 12:32:33.930786 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bf60e1a-b6a1-430a-bf47-776b86bf7c90" containerName="pull" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.930791 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bf60e1a-b6a1-430a-bf47-776b86bf7c90" containerName="pull" Dec 03 12:32:33 crc kubenswrapper[4849]: E1203 12:32:33.930797 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="931dd621-537b-4ff8-96c6-f78015b8c33f" containerName="extract" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.930803 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="931dd621-537b-4ff8-96c6-f78015b8c33f" containerName="extract" Dec 03 12:32:33 crc kubenswrapper[4849]: E1203 12:32:33.930824 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bf60e1a-b6a1-430a-bf47-776b86bf7c90" containerName="extract" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.930829 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bf60e1a-b6a1-430a-bf47-776b86bf7c90" containerName="extract" Dec 03 12:32:33 crc kubenswrapper[4849]: E1203 12:32:33.930841 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="931dd621-537b-4ff8-96c6-f78015b8c33f" 
containerName="pull" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.930845 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="931dd621-537b-4ff8-96c6-f78015b8c33f" containerName="pull" Dec 03 12:32:33 crc kubenswrapper[4849]: E1203 12:32:33.930854 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="931dd621-537b-4ff8-96c6-f78015b8c33f" containerName="util" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.930858 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="931dd621-537b-4ff8-96c6-f78015b8c33f" containerName="util" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.930949 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="931dd621-537b-4ff8-96c6-f78015b8c33f" containerName="extract" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.930967 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bf60e1a-b6a1-430a-bf47-776b86bf7c90" containerName="extract" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.931481 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.933093 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-z2cmf" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.933319 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.933509 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.933763 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.933819 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.933906 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.943231 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2"] Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.966204 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b9050d43-e279-4812-89c0-7a9cce7f5f12-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.966452 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b9050d43-e279-4812-89c0-7a9cce7f5f12-apiservice-cert\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 
12:32:33.966574 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drs4d\" (UniqueName: \"kubernetes.io/projected/b9050d43-e279-4812-89c0-7a9cce7f5f12-kube-api-access-drs4d\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.966708 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/b9050d43-e279-4812-89c0-7a9cce7f5f12-manager-config\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:33 crc kubenswrapper[4849]: I1203 12:32:33.966944 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b9050d43-e279-4812-89c0-7a9cce7f5f12-webhook-cert\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:34 crc kubenswrapper[4849]: I1203 12:32:34.067846 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b9050d43-e279-4812-89c0-7a9cce7f5f12-webhook-cert\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:34 crc kubenswrapper[4849]: I1203 12:32:34.067883 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b9050d43-e279-4812-89c0-7a9cce7f5f12-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:34 crc kubenswrapper[4849]: I1203 12:32:34.067959 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b9050d43-e279-4812-89c0-7a9cce7f5f12-apiservice-cert\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:34 crc kubenswrapper[4849]: I1203 12:32:34.067987 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drs4d\" (UniqueName: \"kubernetes.io/projected/b9050d43-e279-4812-89c0-7a9cce7f5f12-kube-api-access-drs4d\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:34 crc kubenswrapper[4849]: I1203 12:32:34.068019 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/b9050d43-e279-4812-89c0-7a9cce7f5f12-manager-config\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " 
pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:34 crc kubenswrapper[4849]: I1203 12:32:34.068869 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/b9050d43-e279-4812-89c0-7a9cce7f5f12-manager-config\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:34 crc kubenswrapper[4849]: I1203 12:32:34.072450 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b9050d43-e279-4812-89c0-7a9cce7f5f12-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:34 crc kubenswrapper[4849]: I1203 12:32:34.072475 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b9050d43-e279-4812-89c0-7a9cce7f5f12-apiservice-cert\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:34 crc kubenswrapper[4849]: I1203 12:32:34.072498 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b9050d43-e279-4812-89c0-7a9cce7f5f12-webhook-cert\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:34 crc kubenswrapper[4849]: I1203 12:32:34.083790 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drs4d\" (UniqueName: \"kubernetes.io/projected/b9050d43-e279-4812-89c0-7a9cce7f5f12-kube-api-access-drs4d\") pod \"loki-operator-controller-manager-866f574876-c6js2\" (UID: \"b9050d43-e279-4812-89c0-7a9cce7f5f12\") " pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:34 crc kubenswrapper[4849]: I1203 12:32:34.244442 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:34 crc kubenswrapper[4849]: I1203 12:32:34.591393 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2"] Dec 03 12:32:34 crc kubenswrapper[4849]: W1203 12:32:34.596978 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9050d43_e279_4812_89c0_7a9cce7f5f12.slice/crio-ca2baf609e2bc871a247dde525129473b0c5ed6e94d5e4b53c7814039f9619cc WatchSource:0}: Error finding container ca2baf609e2bc871a247dde525129473b0c5ed6e94d5e4b53c7814039f9619cc: Status 404 returned error can't find the container with id ca2baf609e2bc871a247dde525129473b0c5ed6e94d5e4b53c7814039f9619cc Dec 03 12:32:35 crc kubenswrapper[4849]: I1203 12:32:35.524009 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" event={"ID":"b9050d43-e279-4812-89c0-7a9cce7f5f12","Type":"ContainerStarted","Data":"ca2baf609e2bc871a247dde525129473b0c5ed6e94d5e4b53c7814039f9619cc"} Dec 03 12:32:39 crc kubenswrapper[4849]: I1203 12:32:39.035259 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-6f9jr"] Dec 03 12:32:39 crc kubenswrapper[4849]: I1203 12:32:39.036306 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/cluster-logging-operator-ff9846bd-6f9jr" Dec 03 12:32:39 crc kubenswrapper[4849]: I1203 12:32:39.038237 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"cluster-logging-operator-dockercfg-vkxlm" Dec 03 12:32:39 crc kubenswrapper[4849]: I1203 12:32:39.038497 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"openshift-service-ca.crt" Dec 03 12:32:39 crc kubenswrapper[4849]: I1203 12:32:39.046372 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-6f9jr"] Dec 03 12:32:39 crc kubenswrapper[4849]: I1203 12:32:39.046739 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"kube-root-ca.crt" Dec 03 12:32:39 crc kubenswrapper[4849]: I1203 12:32:39.125301 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj8wz\" (UniqueName: \"kubernetes.io/projected/9dbb9334-5825-448c-9d4a-9d4b890e9dea-kube-api-access-hj8wz\") pod \"cluster-logging-operator-ff9846bd-6f9jr\" (UID: \"9dbb9334-5825-448c-9d4a-9d4b890e9dea\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-6f9jr" Dec 03 12:32:39 crc kubenswrapper[4849]: I1203 12:32:39.226430 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj8wz\" (UniqueName: \"kubernetes.io/projected/9dbb9334-5825-448c-9d4a-9d4b890e9dea-kube-api-access-hj8wz\") pod \"cluster-logging-operator-ff9846bd-6f9jr\" (UID: \"9dbb9334-5825-448c-9d4a-9d4b890e9dea\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-6f9jr" Dec 03 12:32:39 crc kubenswrapper[4849]: I1203 12:32:39.241633 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj8wz\" (UniqueName: \"kubernetes.io/projected/9dbb9334-5825-448c-9d4a-9d4b890e9dea-kube-api-access-hj8wz\") pod \"cluster-logging-operator-ff9846bd-6f9jr\" (UID: 
\"9dbb9334-5825-448c-9d4a-9d4b890e9dea\") " pod="openshift-logging/cluster-logging-operator-ff9846bd-6f9jr" Dec 03 12:32:39 crc kubenswrapper[4849]: I1203 12:32:39.348653 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/cluster-logging-operator-ff9846bd-6f9jr" Dec 03 12:32:39 crc kubenswrapper[4849]: I1203 12:32:39.500024 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-ff9846bd-6f9jr"] Dec 03 12:32:39 crc kubenswrapper[4849]: W1203 12:32:39.506449 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9dbb9334_5825_448c_9d4a_9d4b890e9dea.slice/crio-a193491f02c0cff5a7184d77e8eb35dc647cf8ca9c0ca345a73e78ba421cdbf8 WatchSource:0}: Error finding container a193491f02c0cff5a7184d77e8eb35dc647cf8ca9c0ca345a73e78ba421cdbf8: Status 404 returned error can't find the container with id a193491f02c0cff5a7184d77e8eb35dc647cf8ca9c0ca345a73e78ba421cdbf8 Dec 03 12:32:39 crc kubenswrapper[4849]: I1203 12:32:39.548938 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" event={"ID":"b9050d43-e279-4812-89c0-7a9cce7f5f12","Type":"ContainerStarted","Data":"f2f79bd353d3fe901d40fb999ab210a76952159811e85fce9759d59c86c8d9db"} Dec 03 12:32:39 crc kubenswrapper[4849]: I1203 12:32:39.549740 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-ff9846bd-6f9jr" event={"ID":"9dbb9334-5825-448c-9d4a-9d4b890e9dea","Type":"ContainerStarted","Data":"a193491f02c0cff5a7184d77e8eb35dc647cf8ca9c0ca345a73e78ba421cdbf8"} Dec 03 12:32:45 crc kubenswrapper[4849]: I1203 12:32:45.581594 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" event={"ID":"b9050d43-e279-4812-89c0-7a9cce7f5f12","Type":"ContainerStarted","Data":"785177dd332d590bbee8cbd18c3d7c757a127430366d6ec4787973f2c666f4d0"} Dec 03 12:32:45 crc kubenswrapper[4849]: I1203 12:32:45.581956 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:45 crc kubenswrapper[4849]: I1203 12:32:45.583469 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-ff9846bd-6f9jr" event={"ID":"9dbb9334-5825-448c-9d4a-9d4b890e9dea","Type":"ContainerStarted","Data":"5e7203c802f7df8d321c1e2cccd5abe690c364b7a45dd3f5ca0448517ecbffbe"} Dec 03 12:32:45 crc kubenswrapper[4849]: I1203 12:32:45.585064 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" Dec 03 12:32:45 crc kubenswrapper[4849]: I1203 12:32:45.601467 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-866f574876-c6js2" podStartSLOduration=2.728625783 podStartE2EDuration="12.601454429s" podCreationTimestamp="2025-12-03 12:32:33 +0000 UTC" firstStartedPulling="2025-12-03 12:32:34.598517474 +0000 UTC m=+701.060365257" lastFinishedPulling="2025-12-03 12:32:44.47134612 +0000 UTC m=+710.933193903" observedRunningTime="2025-12-03 12:32:45.596745385 +0000 UTC m=+712.058593167" watchObservedRunningTime="2025-12-03 12:32:45.601454429 +0000 UTC m=+712.063302212" Dec 03 12:32:45 crc kubenswrapper[4849]: I1203 
12:32:45.631479 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/cluster-logging-operator-ff9846bd-6f9jr" podStartSLOduration=1.665987623 podStartE2EDuration="6.631465061s" podCreationTimestamp="2025-12-03 12:32:39 +0000 UTC" firstStartedPulling="2025-12-03 12:32:39.509305225 +0000 UTC m=+705.971153007" lastFinishedPulling="2025-12-03 12:32:44.474782662 +0000 UTC m=+710.936630445" observedRunningTime="2025-12-03 12:32:45.62800232 +0000 UTC m=+712.089850103" watchObservedRunningTime="2025-12-03 12:32:45.631465061 +0000 UTC m=+712.093312844" Dec 03 12:32:52 crc kubenswrapper[4849]: I1203 12:32:52.677500 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:32:52 crc kubenswrapper[4849]: I1203 12:32:52.677882 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.564948 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"] Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.565623 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.567583 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt" Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.568365 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt" Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.570916 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.619982 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6sjd\" (UniqueName: \"kubernetes.io/projected/1dfa6469-1621-46e3-b206-3508e316f36c-kube-api-access-h6sjd\") pod \"minio\" (UID: \"1dfa6469-1621-46e3-b206-3508e316f36c\") " pod="minio-dev/minio" Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.620064 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5888e608-883b-4038-9abf-bb2dd28410da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5888e608-883b-4038-9abf-bb2dd28410da\") pod \"minio\" (UID: \"1dfa6469-1621-46e3-b206-3508e316f36c\") " pod="minio-dev/minio" Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.721027 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5888e608-883b-4038-9abf-bb2dd28410da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5888e608-883b-4038-9abf-bb2dd28410da\") pod \"minio\" (UID: \"1dfa6469-1621-46e3-b206-3508e316f36c\") " pod="minio-dev/minio" Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.721115 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6sjd\" (UniqueName: \"kubernetes.io/projected/1dfa6469-1621-46e3-b206-3508e316f36c-kube-api-access-h6sjd\") 
pod \"minio\" (UID: \"1dfa6469-1621-46e3-b206-3508e316f36c\") " pod="minio-dev/minio" Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.723417 4849 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.723444 4849 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5888e608-883b-4038-9abf-bb2dd28410da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5888e608-883b-4038-9abf-bb2dd28410da\") pod \"minio\" (UID: \"1dfa6469-1621-46e3-b206-3508e316f36c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/cf4ffdf764809d93d25f5a506d82b8d5a8a7e5dd9e8b4fba154aafb9ffea78c2/globalmount\"" pod="minio-dev/minio" Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.735608 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6sjd\" (UniqueName: \"kubernetes.io/projected/1dfa6469-1621-46e3-b206-3508e316f36c-kube-api-access-h6sjd\") pod \"minio\" (UID: \"1dfa6469-1621-46e3-b206-3508e316f36c\") " pod="minio-dev/minio" Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.741426 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5888e608-883b-4038-9abf-bb2dd28410da\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5888e608-883b-4038-9abf-bb2dd28410da\") pod \"minio\" (UID: \"1dfa6469-1621-46e3-b206-3508e316f36c\") " pod="minio-dev/minio" Dec 03 12:32:53 crc kubenswrapper[4849]: I1203 12:32:53.879269 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Dec 03 12:32:54 crc kubenswrapper[4849]: I1203 12:32:54.217831 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 03 12:32:54 crc kubenswrapper[4849]: W1203 12:32:54.224751 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1dfa6469_1621_46e3_b206_3508e316f36c.slice/crio-a8b6d48a441665b90201e6d902ee8eae06ce4e99b6db29fe63792f839ad08a02 WatchSource:0}: Error finding container a8b6d48a441665b90201e6d902ee8eae06ce4e99b6db29fe63792f839ad08a02: Status 404 returned error can't find the container with id a8b6d48a441665b90201e6d902ee8eae06ce4e99b6db29fe63792f839ad08a02 Dec 03 12:32:54 crc kubenswrapper[4849]: I1203 12:32:54.621836 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"1dfa6469-1621-46e3-b206-3508e316f36c","Type":"ContainerStarted","Data":"a8b6d48a441665b90201e6d902ee8eae06ce4e99b6db29fe63792f839ad08a02"} Dec 03 12:32:57 crc kubenswrapper[4849]: I1203 12:32:57.638881 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"1dfa6469-1621-46e3-b206-3508e316f36c","Type":"ContainerStarted","Data":"8e1fd3928d759fc5919cb9649cd17c9816ec2715a65c179bb9ace7eadf3ce191"} Dec 03 12:32:57 crc kubenswrapper[4849]: I1203 12:32:57.665858 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=4.102037592 podStartE2EDuration="6.665836388s" podCreationTimestamp="2025-12-03 12:32:51 +0000 UTC" firstStartedPulling="2025-12-03 12:32:54.226510857 +0000 UTC m=+720.688358640" lastFinishedPulling="2025-12-03 12:32:56.790309663 +0000 UTC m=+723.252157436" observedRunningTime="2025-12-03 12:32:57.662789608 +0000 UTC m=+724.124637392" watchObservedRunningTime="2025-12-03 
12:32:57.665836388 +0000 UTC m=+724.127684172" Dec 03 12:33:00 crc kubenswrapper[4849]: I1203 12:33:00.986214 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-skzmr"] Dec 03 12:33:00 crc kubenswrapper[4849]: I1203 12:33:00.987091 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:00 crc kubenswrapper[4849]: I1203 12:33:00.990184 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-dockercfg-gbv2g" Dec 03 12:33:00 crc kubenswrapper[4849]: I1203 12:33:00.991541 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-config" Dec 03 12:33:00 crc kubenswrapper[4849]: I1203 12:33:00.992783 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-distributor-http" Dec 03 12:33:00 crc kubenswrapper[4849]: I1203 12:33:00.992820 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-ca-bundle" Dec 03 12:33:00 crc kubenswrapper[4849]: I1203 12:33:00.992820 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-distributor-grpc" Dec 03 12:33:00 crc kubenswrapper[4849]: I1203 12:33:00.996595 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-skzmr"] Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.123581 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.123694 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.123834 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.123859 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfbgp\" (UniqueName: \"kubernetes.io/projected/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-kube-api-access-nfbgp\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.123983 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-config\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.139439 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-cn9h6"] Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.140374 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.144871 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-querier-http" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.144871 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-querier-grpc" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.144959 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-s3" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.148636 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-cn9h6"] Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.186796 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz"] Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.187521 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.189299 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-query-frontend-http" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.189311 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-query-frontend-grpc" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.195458 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz"] Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.225110 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/77037998-c847-4702-a80d-2c295922cb04-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.225151 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/77037998-c847-4702-a80d-2c295922cb04-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.225188 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: 
\"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.225605 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfbgp\" (UniqueName: \"kubernetes.io/projected/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-kube-api-access-nfbgp\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.225716 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-config\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.225756 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/77037998-c847-4702-a80d-2c295922cb04-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.226044 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2x9v\" (UniqueName: \"kubernetes.io/projected/77037998-c847-4702-a80d-2c295922cb04-kube-api-access-p2x9v\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.226099 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.226125 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77037998-c847-4702-a80d-2c295922cb04-config\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.226159 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/77037998-c847-4702-a80d-2c295922cb04-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.226176 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " 
pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.226588 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-config\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.227230 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-logging-loki-ca-bundle\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.231387 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-distributor-http\" (UniqueName: \"kubernetes.io/secret/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-logging-loki-distributor-http\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.234805 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-logging-loki-distributor-grpc\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.264700 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b"] Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.265583 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.266978 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-gateway-ca-bundle" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.271266 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfbgp\" (UniqueName: \"kubernetes.io/projected/08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9-kube-api-access-nfbgp\") pod \"logging-loki-distributor-76cc67bf56-skzmr\" (UID: \"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9\") " pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.271395 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-client-http" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.271535 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.271714 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-http" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.284139 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"logging-loki-gateway" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.284574 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf"] Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.285458 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.286903 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-gateway-dockercfg-8mljl" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.293076 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b"] Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.302050 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.309980 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf"] Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327252 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1e2e8068-2365-43fe-8b38-ad21b6007471-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327289 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/77037998-c847-4702-a80d-2c295922cb04-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327334 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/f625b073-28c5-4b22-8a1e-2e43237fc19b-rbac\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327352 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/91698613-76b4-41b6-bd44-3197f5e6f5f8-tenants\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327367 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxtzx\" (UniqueName: \"kubernetes.io/projected/91698613-76b4-41b6-bd44-3197f5e6f5f8-kube-api-access-lxtzx\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327389 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/91698613-76b4-41b6-bd44-3197f5e6f5f8-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327404 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/77037998-c847-4702-a80d-2c295922cb04-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327422 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/77037998-c847-4702-a80d-2c295922cb04-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327438 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/1e2e8068-2365-43fe-8b38-ad21b6007471-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327462 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e2e8068-2365-43fe-8b38-ad21b6007471-config\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327475 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/91698613-76b4-41b6-bd44-3197f5e6f5f8-logging-loki-ca-bundle\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327495 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/f625b073-28c5-4b22-8a1e-2e43237fc19b-lokistack-gateway\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327510 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/91698613-76b4-41b6-bd44-3197f5e6f5f8-rbac\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327528 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/f625b073-28c5-4b22-8a1e-2e43237fc19b-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327543 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f625b073-28c5-4b22-8a1e-2e43237fc19b-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327555 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-w9nkn\" (UniqueName: \"kubernetes.io/projected/f625b073-28c5-4b22-8a1e-2e43237fc19b-kube-api-access-w9nkn\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327571 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/91698613-76b4-41b6-bd44-3197f5e6f5f8-lokistack-gateway\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327585 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/91698613-76b4-41b6-bd44-3197f5e6f5f8-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327606 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/f625b073-28c5-4b22-8a1e-2e43237fc19b-tls-secret\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327622 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/91698613-76b4-41b6-bd44-3197f5e6f5f8-tls-secret\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327636 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/77037998-c847-4702-a80d-2c295922cb04-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327667 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/1e2e8068-2365-43fe-8b38-ad21b6007471-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327680 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f625b073-28c5-4b22-8a1e-2e43237fc19b-logging-loki-ca-bundle\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327770 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" 
(UniqueName: \"kubernetes.io/secret/f625b073-28c5-4b22-8a1e-2e43237fc19b-tenants\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327795 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2x9v\" (UniqueName: \"kubernetes.io/projected/77037998-c847-4702-a80d-2c295922cb04-kube-api-access-p2x9v\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327816 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x22jf\" (UniqueName: \"kubernetes.io/projected/1e2e8068-2365-43fe-8b38-ad21b6007471-kube-api-access-x22jf\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.327837 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77037998-c847-4702-a80d-2c295922cb04-config\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.330626 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77037998-c847-4702-a80d-2c295922cb04-config\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.332940 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/77037998-c847-4702-a80d-2c295922cb04-logging-loki-ca-bundle\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.335845 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-querier-http\" (UniqueName: \"kubernetes.io/secret/77037998-c847-4702-a80d-2c295922cb04-logging-loki-querier-http\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.337249 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-querier-grpc\" (UniqueName: \"kubernetes.io/secret/77037998-c847-4702-a80d-2c295922cb04-logging-loki-querier-grpc\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.343957 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/77037998-c847-4702-a80d-2c295922cb04-logging-loki-s3\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " 
pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.360150 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2x9v\" (UniqueName: \"kubernetes.io/projected/77037998-c847-4702-a80d-2c295922cb04-kube-api-access-p2x9v\") pod \"logging-loki-querier-5895d59bb8-cn9h6\" (UID: \"77037998-c847-4702-a80d-2c295922cb04\") " pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.428611 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxtzx\" (UniqueName: \"kubernetes.io/projected/91698613-76b4-41b6-bd44-3197f5e6f5f8-kube-api-access-lxtzx\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.428849 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/91698613-76b4-41b6-bd44-3197f5e6f5f8-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.428877 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/1e2e8068-2365-43fe-8b38-ad21b6007471-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.428913 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e2e8068-2365-43fe-8b38-ad21b6007471-config\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.428929 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/91698613-76b4-41b6-bd44-3197f5e6f5f8-logging-loki-ca-bundle\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.428957 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/f625b073-28c5-4b22-8a1e-2e43237fc19b-lokistack-gateway\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.428973 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/91698613-76b4-41b6-bd44-3197f5e6f5f8-rbac\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429001 4849 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/f625b073-28c5-4b22-8a1e-2e43237fc19b-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429016 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f625b073-28c5-4b22-8a1e-2e43237fc19b-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429031 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9nkn\" (UniqueName: \"kubernetes.io/projected/f625b073-28c5-4b22-8a1e-2e43237fc19b-kube-api-access-w9nkn\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429050 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/91698613-76b4-41b6-bd44-3197f5e6f5f8-lokistack-gateway\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429066 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/91698613-76b4-41b6-bd44-3197f5e6f5f8-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429095 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/f625b073-28c5-4b22-8a1e-2e43237fc19b-tls-secret\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429116 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/91698613-76b4-41b6-bd44-3197f5e6f5f8-tls-secret\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429135 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/1e2e8068-2365-43fe-8b38-ad21b6007471-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429153 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/f625b073-28c5-4b22-8a1e-2e43237fc19b-logging-loki-ca-bundle\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429185 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/f625b073-28c5-4b22-8a1e-2e43237fc19b-tenants\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429224 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x22jf\" (UniqueName: \"kubernetes.io/projected/1e2e8068-2365-43fe-8b38-ad21b6007471-kube-api-access-x22jf\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429260 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1e2e8068-2365-43fe-8b38-ad21b6007471-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429305 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/f625b073-28c5-4b22-8a1e-2e43237fc19b-rbac\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.429322 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/91698613-76b4-41b6-bd44-3197f5e6f5f8-tenants\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: E1203 12:33:01.430039 4849 secret.go:188] Couldn't get secret openshift-logging/logging-loki-gateway-http: secret "logging-loki-gateway-http" not found Dec 03 12:33:01 crc kubenswrapper[4849]: E1203 12:33:01.430090 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f625b073-28c5-4b22-8a1e-2e43237fc19b-tls-secret podName:f625b073-28c5-4b22-8a1e-2e43237fc19b nodeName:}" failed. No retries permitted until 2025-12-03 12:33:01.930075613 +0000 UTC m=+728.391923397 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/f625b073-28c5-4b22-8a1e-2e43237fc19b-tls-secret") pod "logging-loki-gateway-9f98ffcc5-9lgrf" (UID: "f625b073-28c5-4b22-8a1e-2e43237fc19b") : secret "logging-loki-gateway-http" not found Dec 03 12:33:01 crc kubenswrapper[4849]: E1203 12:33:01.430826 4849 secret.go:188] Couldn't get secret openshift-logging/logging-loki-gateway-http: secret "logging-loki-gateway-http" not found Dec 03 12:33:01 crc kubenswrapper[4849]: E1203 12:33:01.430871 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/91698613-76b4-41b6-bd44-3197f5e6f5f8-tls-secret podName:91698613-76b4-41b6-bd44-3197f5e6f5f8 nodeName:}" failed. No retries permitted until 2025-12-03 12:33:01.93086052 +0000 UTC m=+728.392708302 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/91698613-76b4-41b6-bd44-3197f5e6f5f8-tls-secret") pod "logging-loki-gateway-9f98ffcc5-5nz6b" (UID: "91698613-76b4-41b6-bd44-3197f5e6f5f8") : secret "logging-loki-gateway-http" not found Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.431183 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/91698613-76b4-41b6-bd44-3197f5e6f5f8-logging-loki-ca-bundle\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.431954 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e2e8068-2365-43fe-8b38-ad21b6007471-config\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.432359 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f625b073-28c5-4b22-8a1e-2e43237fc19b-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.432629 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/91698613-76b4-41b6-bd44-3197f5e6f5f8-logging-loki-gateway-ca-bundle\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.432822 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f625b073-28c5-4b22-8a1e-2e43237fc19b-logging-loki-ca-bundle\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.433048 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/91698613-76b4-41b6-bd44-3197f5e6f5f8-rbac\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: 
\"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.433798 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/f625b073-28c5-4b22-8a1e-2e43237fc19b-rbac\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.433836 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/f625b073-28c5-4b22-8a1e-2e43237fc19b-lokistack-gateway\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.434146 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1e2e8068-2365-43fe-8b38-ad21b6007471-logging-loki-ca-bundle\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.434361 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/f625b073-28c5-4b22-8a1e-2e43237fc19b-tenants\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.434718 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/91698613-76b4-41b6-bd44-3197f5e6f5f8-lokistack-gateway\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.436917 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/91698613-76b4-41b6-bd44-3197f5e6f5f8-tenants\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.437219 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/1e2e8068-2365-43fe-8b38-ad21b6007471-logging-loki-query-frontend-http\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.437329 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/91698613-76b4-41b6-bd44-3197f5e6f5f8-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.437556 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"logging-loki-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/1e2e8068-2365-43fe-8b38-ad21b6007471-logging-loki-query-frontend-grpc\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.439341 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/f625b073-28c5-4b22-8a1e-2e43237fc19b-logging-loki-gateway-client-http\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.443597 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxtzx\" (UniqueName: \"kubernetes.io/projected/91698613-76b4-41b6-bd44-3197f5e6f5f8-kube-api-access-lxtzx\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.444246 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x22jf\" (UniqueName: \"kubernetes.io/projected/1e2e8068-2365-43fe-8b38-ad21b6007471-kube-api-access-x22jf\") pod \"logging-loki-query-frontend-84558f7c9f-p8jpz\" (UID: \"1e2e8068-2365-43fe-8b38-ad21b6007471\") " pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.444996 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9nkn\" (UniqueName: \"kubernetes.io/projected/f625b073-28c5-4b22-8a1e-2e43237fc19b-kube-api-access-w9nkn\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.460620 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.509202 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.720695 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-distributor-76cc67bf56-skzmr"] Dec 03 12:33:01 crc kubenswrapper[4849]: W1203 12:33:01.724797 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08d80fb5_b663_48cc_9ec7_0c4fbe92d7f9.slice/crio-bc962c9c9dac78dd412bc4196a91f658a86a722423e6132d49ae7413551b7a6d WatchSource:0}: Error finding container bc962c9c9dac78dd412bc4196a91f658a86a722423e6132d49ae7413551b7a6d: Status 404 returned error can't find the container with id bc962c9c9dac78dd412bc4196a91f658a86a722423e6132d49ae7413551b7a6d Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.808554 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-querier-5895d59bb8-cn9h6"] Dec 03 12:33:01 crc kubenswrapper[4849]: W1203 12:33:01.811118 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77037998_c847_4702_a80d_2c295922cb04.slice/crio-69fc05831f7639fcc3ff5491fa44dfde2ad44c845910aa1a3c04d6766d3a7303 WatchSource:0}: Error finding container 69fc05831f7639fcc3ff5491fa44dfde2ad44c845910aa1a3c04d6766d3a7303: Status 404 returned error can't find the container with id 69fc05831f7639fcc3ff5491fa44dfde2ad44c845910aa1a3c04d6766d3a7303 Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.888943 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz"] Dec 03 12:33:01 crc kubenswrapper[4849]: W1203 12:33:01.892711 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e2e8068_2365_43fe_8b38_ad21b6007471.slice/crio-8e1b9accecad8a0e7ae83e716c411c286168ee6689312dca2662e2b288883c37 WatchSource:0}: Error finding container 8e1b9accecad8a0e7ae83e716c411c286168ee6689312dca2662e2b288883c37: Status 404 returned error can't find the container with id 8e1b9accecad8a0e7ae83e716c411c286168ee6689312dca2662e2b288883c37 Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.936207 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/f625b073-28c5-4b22-8a1e-2e43237fc19b-tls-secret\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.936253 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/91698613-76b4-41b6-bd44-3197f5e6f5f8-tls-secret\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.939065 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/91698613-76b4-41b6-bd44-3197f5e6f5f8-tls-secret\") pod \"logging-loki-gateway-9f98ffcc5-5nz6b\" (UID: \"91698613-76b4-41b6-bd44-3197f5e6f5f8\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:01 crc kubenswrapper[4849]: I1203 12:33:01.939155 4849 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/f625b073-28c5-4b22-8a1e-2e43237fc19b-tls-secret\") pod \"logging-loki-gateway-9f98ffcc5-9lgrf\" (UID: \"f625b073-28c5-4b22-8a1e-2e43237fc19b\") " pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.121672 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.122383 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.123925 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-ingester-grpc" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.125153 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-ingester-http" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.131911 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.183839 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.185257 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.188063 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-compactor-grpc" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.189130 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-compactor-http" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.192420 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.202580 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.212745 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.235072 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.235886 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.237816 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-index-gateway-http" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.237934 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"logging-loki-index-gateway-grpc" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.241354 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ccc4da30-99aa-4dac-8789-613b09851895\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ccc4da30-99aa-4dac-8789-613b09851895\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.241561 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-config\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.241747 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.241900 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-69b4d611-1e0a-4b14-bf41-2aca6bd25c25\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-69b4d611-1e0a-4b14-bf41-2aca6bd25c25\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.242011 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.242157 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.242501 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79fnz\" (UniqueName: \"kubernetes.io/projected/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-kube-api-access-79fnz\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.242631 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.244258 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344134 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344179 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344233 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344250 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-20cb84ab-d74e-4198-8a18-58b522b7ebf8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-20cb84ab-d74e-4198-8a18-58b522b7ebf8\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344277 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bd46349d-af06-4123-a47e-634e322c840b-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344292 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbvr9\" (UniqueName: \"kubernetes.io/projected/bd46349d-af06-4123-a47e-634e322c840b-kube-api-access-mbvr9\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344309 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-69b4d611-1e0a-4b14-bf41-2aca6bd25c25\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-69b4d611-1e0a-4b14-bf41-2aca6bd25c25\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344333 4849 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344358 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/bd46349d-af06-4123-a47e-634e322c840b-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344376 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/bd46349d-af06-4123-a47e-634e322c840b-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344395 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344438 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344456 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ccc4da30-99aa-4dac-8789-613b09851895\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ccc4da30-99aa-4dac-8789-613b09851895\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344474 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-config\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344494 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-config\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344519 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/bd46349d-af06-4123-a47e-634e322c840b-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: 
\"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344534 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7b4f07a7-85c9-4805-9310-1d26ce1f5098\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7b4f07a7-85c9-4805-9310-1d26ce1f5098\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344560 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.344932 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd46349d-af06-4123-a47e-634e322c840b-config\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.346243 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79fnz\" (UniqueName: \"kubernetes.io/projected/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-kube-api-access-79fnz\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.346267 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.346302 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8zmr\" (UniqueName: \"kubernetes.io/projected/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-kube-api-access-s8zmr\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.346855 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-config\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.347979 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-logging-loki-ca-bundle\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.348422 4849 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. 
Skipping MountDevice... Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.348459 4849 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ccc4da30-99aa-4dac-8789-613b09851895\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ccc4da30-99aa-4dac-8789-613b09851895\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/36cb94800efa0a8d6c9315c323a66ff907ffa9e6c5280278be2331a3c5c2bca8/globalmount\"" pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.349668 4849 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.349689 4849 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-69b4d611-1e0a-4b14-bf41-2aca6bd25c25\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-69b4d611-1e0a-4b14-bf41-2aca6bd25c25\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2af957afa094fd10047c1bbd08f2956193b44fc93bba0c53f7f62fa28fd87a18/globalmount\"" pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.350529 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-logging-loki-ingester-grpc\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.350910 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-logging-loki-s3\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.357802 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ingester-http\" (UniqueName: \"kubernetes.io/secret/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-logging-loki-ingester-http\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.362747 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79fnz\" (UniqueName: \"kubernetes.io/projected/39d1c4cc-1f24-4c05-b7af-87cba182e3e6-kube-api-access-79fnz\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.373885 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ccc4da30-99aa-4dac-8789-613b09851895\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ccc4da30-99aa-4dac-8789-613b09851895\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.374182 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"pvc-69b4d611-1e0a-4b14-bf41-2aca6bd25c25\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-69b4d611-1e0a-4b14-bf41-2aca6bd25c25\") pod \"logging-loki-ingester-0\" (UID: \"39d1c4cc-1f24-4c05-b7af-87cba182e3e6\") " pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.437280 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454053 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454098 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-config\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454130 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/bd46349d-af06-4123-a47e-634e322c840b-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454148 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7b4f07a7-85c9-4805-9310-1d26ce1f5098\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7b4f07a7-85c9-4805-9310-1d26ce1f5098\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454179 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454205 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd46349d-af06-4123-a47e-634e322c840b-config\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454240 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8zmr\" (UniqueName: \"kubernetes.io/projected/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-kube-api-access-s8zmr\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454262 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-compactor-http\" (UniqueName: 
\"kubernetes.io/secret/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454279 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454322 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-20cb84ab-d74e-4198-8a18-58b522b7ebf8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-20cb84ab-d74e-4198-8a18-58b522b7ebf8\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454343 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bd46349d-af06-4123-a47e-634e322c840b-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454361 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbvr9\" (UniqueName: \"kubernetes.io/projected/bd46349d-af06-4123-a47e-634e322c840b-kube-api-access-mbvr9\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454402 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/bd46349d-af06-4123-a47e-634e322c840b-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.454418 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/bd46349d-af06-4123-a47e-634e322c840b-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.456616 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-logging-loki-ca-bundle\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.457709 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-config\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.459468 4849 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bd46349d-af06-4123-a47e-634e322c840b-logging-loki-ca-bundle\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.464588 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd46349d-af06-4123-a47e-634e322c840b-config\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.477072 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-compactor-http\" (UniqueName: \"kubernetes.io/secret/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-logging-loki-compactor-http\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.479550 4849 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.479593 4849 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-20cb84ab-d74e-4198-8a18-58b522b7ebf8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-20cb84ab-d74e-4198-8a18-58b522b7ebf8\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/76626e193a4e4e1ffa5c5f930b7e89c0e66e2d4fbf6566e3b76a40c86f7c061e/globalmount\"" pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.479971 4849 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.480001 4849 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7b4f07a7-85c9-4805-9310-1d26ce1f5098\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7b4f07a7-85c9-4805-9310-1d26ce1f5098\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0e56b9c149f0fd92b9d1851fa9e9dff3f9e4f21361c9cd5beeda09238bb01c14/globalmount\"" pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.485812 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-logging-loki-s3\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.492287 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/bd46349d-af06-4123-a47e-634e322c840b-logging-loki-s3\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.493113 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-logging-loki-compactor-grpc\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.493663 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbvr9\" (UniqueName: \"kubernetes.io/projected/bd46349d-af06-4123-a47e-634e322c840b-kube-api-access-mbvr9\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.493673 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/bd46349d-af06-4123-a47e-634e322c840b-logging-loki-index-gateway-grpc\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.496163 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/bd46349d-af06-4123-a47e-634e322c840b-logging-loki-index-gateway-http\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.498225 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8zmr\" (UniqueName: \"kubernetes.io/projected/59f2429e-ddd5-463a-88c2-35bb8c8e2faf-kube-api-access-s8zmr\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.547146 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"pvc-20cb84ab-d74e-4198-8a18-58b522b7ebf8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-20cb84ab-d74e-4198-8a18-58b522b7ebf8\") pod \"logging-loki-compactor-0\" (UID: \"59f2429e-ddd5-463a-88c2-35bb8c8e2faf\") " pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.554365 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7b4f07a7-85c9-4805-9310-1d26ce1f5098\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7b4f07a7-85c9-4805-9310-1d26ce1f5098\") pod \"logging-loki-index-gateway-0\" (UID: \"bd46349d-af06-4123-a47e-634e322c840b\") " pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.578662 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b"] Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.585169 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.624079 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf"] Dec 03 12:33:02 crc kubenswrapper[4849]: W1203 12:33:02.624438 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf625b073_28c5_4b22_8a1e_2e43237fc19b.slice/crio-f8e3cfec128722ea0421eb82959c03a08d5f6d0bc1ca639d81847ff9c06f6266 WatchSource:0}: Error finding container f8e3cfec128722ea0421eb82959c03a08d5f6d0bc1ca639d81847ff9c06f6266: Status 404 returned error can't find the container with id f8e3cfec128722ea0421eb82959c03a08d5f6d0bc1ca639d81847ff9c06f6266 Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.663522 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" event={"ID":"1e2e8068-2365-43fe-8b38-ad21b6007471","Type":"ContainerStarted","Data":"8e1b9accecad8a0e7ae83e716c411c286168ee6689312dca2662e2b288883c37"} Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.665627 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" event={"ID":"77037998-c847-4702-a80d-2c295922cb04","Type":"ContainerStarted","Data":"69fc05831f7639fcc3ff5491fa44dfde2ad44c845910aa1a3c04d6766d3a7303"} Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.669044 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" event={"ID":"f625b073-28c5-4b22-8a1e-2e43237fc19b","Type":"ContainerStarted","Data":"f8e3cfec128722ea0421eb82959c03a08d5f6d0bc1ca639d81847ff9c06f6266"} Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.671427 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" event={"ID":"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9","Type":"ContainerStarted","Data":"bc962c9c9dac78dd412bc4196a91f658a86a722423e6132d49ae7413551b7a6d"} Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.672452 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" event={"ID":"91698613-76b4-41b6-bd44-3197f5e6f5f8","Type":"ContainerStarted","Data":"fe3c24e01cc593df3edc82ff4bf2b12eb7f6d77e7af2e9dcd5c67ac39b150270"} Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.700765 4849 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-ingester-0"] Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.801236 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:02 crc kubenswrapper[4849]: I1203 12:33:02.957833 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-index-gateway-0"] Dec 03 12:33:02 crc kubenswrapper[4849]: W1203 12:33:02.959254 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd46349d_af06_4123_a47e_634e322c840b.slice/crio-3f22134424af36767030b2ca3e5a1aa6504e78f8f15089da913a9ff4555396a0 WatchSource:0}: Error finding container 3f22134424af36767030b2ca3e5a1aa6504e78f8f15089da913a9ff4555396a0: Status 404 returned error can't find the container with id 3f22134424af36767030b2ca3e5a1aa6504e78f8f15089da913a9ff4555396a0 Dec 03 12:33:03 crc kubenswrapper[4849]: I1203 12:33:03.145673 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/logging-loki-compactor-0"] Dec 03 12:33:03 crc kubenswrapper[4849]: I1203 12:33:03.682915 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-index-gateway-0" event={"ID":"bd46349d-af06-4123-a47e-634e322c840b","Type":"ContainerStarted","Data":"3f22134424af36767030b2ca3e5a1aa6504e78f8f15089da913a9ff4555396a0"} Dec 03 12:33:03 crc kubenswrapper[4849]: I1203 12:33:03.684384 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-ingester-0" event={"ID":"39d1c4cc-1f24-4c05-b7af-87cba182e3e6","Type":"ContainerStarted","Data":"c0962226db9387fa646943996cf7bb042a98d631e7f240c4f114b1557d301640"} Dec 03 12:33:03 crc kubenswrapper[4849]: I1203 12:33:03.685541 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-compactor-0" event={"ID":"59f2429e-ddd5-463a-88c2-35bb8c8e2faf","Type":"ContainerStarted","Data":"09b7c98458b151621b0d43417886d80a78e12f4fd27af06f21d5963c59811fc3"} Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.700487 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" event={"ID":"f625b073-28c5-4b22-8a1e-2e43237fc19b","Type":"ContainerStarted","Data":"13d73e8ef8188db2f971e31ce7c7d1e51f84a4ff10d076f0c577af81b25e07cc"} Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.702188 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" event={"ID":"08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9","Type":"ContainerStarted","Data":"2f5ea07160d06b456e23fdb99e0433f7144968403ca140add3654db0f286ce13"} Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.702303 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.704020 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" event={"ID":"91698613-76b4-41b6-bd44-3197f5e6f5f8","Type":"ContainerStarted","Data":"99cd1d24cc2a8ef61d870d6a2d63caafe494db14ad8fb22393feb788bd793a5d"} Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.704933 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-ingester-0" 
event={"ID":"39d1c4cc-1f24-4c05-b7af-87cba182e3e6","Type":"ContainerStarted","Data":"05143577885a6f25c7edf58b3c8084290275e03114545f7f69521bd1b20e86d1"} Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.705118 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.712419 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-index-gateway-0" event={"ID":"bd46349d-af06-4123-a47e-634e322c840b","Type":"ContainerStarted","Data":"876cfc0d6acc82292e1b6a09f86ad99d07f984df9cd7ee9ed3d42163709028fb"} Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.712513 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.718566 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" podStartSLOduration=2.471514829 podStartE2EDuration="5.718555227s" podCreationTimestamp="2025-12-03 12:33:00 +0000 UTC" firstStartedPulling="2025-12-03 12:33:01.726584371 +0000 UTC m=+728.188432154" lastFinishedPulling="2025-12-03 12:33:04.973624768 +0000 UTC m=+731.435472552" observedRunningTime="2025-12-03 12:33:05.715033003 +0000 UTC m=+732.176880786" watchObservedRunningTime="2025-12-03 12:33:05.718555227 +0000 UTC m=+732.180403010" Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.724330 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" event={"ID":"1e2e8068-2365-43fe-8b38-ad21b6007471","Type":"ContainerStarted","Data":"035e59548e16b3189b4898e7982e0513d48d6baf11c10dde6597eaf73cbf2181"} Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.724435 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.727036 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" event={"ID":"77037998-c847-4702-a80d-2c295922cb04","Type":"ContainerStarted","Data":"8c6acf21acbefebf3dde5d36888b3dbc73e028fb39240ba1ffea00ea540e5436"} Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.727146 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.728346 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-index-gateway-0" podStartSLOduration=2.699337329 podStartE2EDuration="4.728335418s" podCreationTimestamp="2025-12-03 12:33:01 +0000 UTC" firstStartedPulling="2025-12-03 12:33:02.961813851 +0000 UTC m=+729.423661633" lastFinishedPulling="2025-12-03 12:33:04.990811939 +0000 UTC m=+731.452659722" observedRunningTime="2025-12-03 12:33:05.725753854 +0000 UTC m=+732.187601637" watchObservedRunningTime="2025-12-03 12:33:05.728335418 +0000 UTC m=+732.190183201" Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.729123 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-compactor-0" event={"ID":"59f2429e-ddd5-463a-88c2-35bb8c8e2faf","Type":"ContainerStarted","Data":"526d72254470bc7a2b32a6704f67df63081d660223ea3cc5fb57360b6bf4e1cb"} Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 
12:33:05.729230 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.743561 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-ingester-0" podStartSLOduration=2.459816098 podStartE2EDuration="4.743547175s" podCreationTimestamp="2025-12-03 12:33:01 +0000 UTC" firstStartedPulling="2025-12-03 12:33:02.707133351 +0000 UTC m=+729.168981134" lastFinishedPulling="2025-12-03 12:33:04.990864427 +0000 UTC m=+731.452712211" observedRunningTime="2025-12-03 12:33:05.739788406 +0000 UTC m=+732.201636189" watchObservedRunningTime="2025-12-03 12:33:05.743547175 +0000 UTC m=+732.205394958" Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.755112 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" podStartSLOduration=1.578219348 podStartE2EDuration="4.7550946s" podCreationTimestamp="2025-12-03 12:33:01 +0000 UTC" firstStartedPulling="2025-12-03 12:33:01.812877345 +0000 UTC m=+728.274725128" lastFinishedPulling="2025-12-03 12:33:04.989752607 +0000 UTC m=+731.451600380" observedRunningTime="2025-12-03 12:33:05.753640003 +0000 UTC m=+732.215487787" watchObservedRunningTime="2025-12-03 12:33:05.7550946 +0000 UTC m=+732.216942382" Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.764397 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" podStartSLOduration=1.671148381 podStartE2EDuration="4.764384649s" podCreationTimestamp="2025-12-03 12:33:01 +0000 UTC" firstStartedPulling="2025-12-03 12:33:01.894941568 +0000 UTC m=+728.356789351" lastFinishedPulling="2025-12-03 12:33:04.988177835 +0000 UTC m=+731.450025619" observedRunningTime="2025-12-03 12:33:05.764138797 +0000 UTC m=+732.225986580" watchObservedRunningTime="2025-12-03 12:33:05.764384649 +0000 UTC m=+732.226232432" Dec 03 12:33:05 crc kubenswrapper[4849]: I1203 12:33:05.777357 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-compactor-0" podStartSLOduration=2.938370424 podStartE2EDuration="4.7773442s" podCreationTimestamp="2025-12-03 12:33:01 +0000 UTC" firstStartedPulling="2025-12-03 12:33:03.151957887 +0000 UTC m=+729.613805671" lastFinishedPulling="2025-12-03 12:33:04.990931665 +0000 UTC m=+731.452779447" observedRunningTime="2025-12-03 12:33:05.776196942 +0000 UTC m=+732.238044725" watchObservedRunningTime="2025-12-03 12:33:05.7773442 +0000 UTC m=+732.239191983" Dec 03 12:33:09 crc kubenswrapper[4849]: I1203 12:33:09.750317 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" event={"ID":"f625b073-28c5-4b22-8a1e-2e43237fc19b","Type":"ContainerStarted","Data":"a435ee37b4400ab789175cd78c4bd53af060b7b72ecab40ca48a40c3e54ae229"} Dec 03 12:33:09 crc kubenswrapper[4849]: I1203 12:33:09.750707 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:09 crc kubenswrapper[4849]: I1203 12:33:09.750719 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:09 crc kubenswrapper[4849]: I1203 12:33:09.752837 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" event={"ID":"91698613-76b4-41b6-bd44-3197f5e6f5f8","Type":"ContainerStarted","Data":"75be5a8c461d13bff919db091cecb660cf84a34abcb82fce34a40daa7e2fdda4"} Dec 03 12:33:09 crc kubenswrapper[4849]: I1203 12:33:09.753977 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:09 crc kubenswrapper[4849]: I1203 12:33:09.754008 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:09 crc kubenswrapper[4849]: I1203 12:33:09.758877 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:09 crc kubenswrapper[4849]: I1203 12:33:09.760601 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:09 crc kubenswrapper[4849]: I1203 12:33:09.760872 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" Dec 03 12:33:09 crc kubenswrapper[4849]: I1203 12:33:09.768017 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" Dec 03 12:33:09 crc kubenswrapper[4849]: I1203 12:33:09.769851 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-9lgrf" podStartSLOduration=2.368516989 podStartE2EDuration="8.769837799s" podCreationTimestamp="2025-12-03 12:33:01 +0000 UTC" firstStartedPulling="2025-12-03 12:33:02.628922345 +0000 UTC m=+729.090770128" lastFinishedPulling="2025-12-03 12:33:09.030243154 +0000 UTC m=+735.492090938" observedRunningTime="2025-12-03 12:33:09.765117042 +0000 UTC m=+736.226964824" watchObservedRunningTime="2025-12-03 12:33:09.769837799 +0000 UTC m=+736.231685581" Dec 03 12:33:09 crc kubenswrapper[4849]: I1203 12:33:09.781354 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/logging-loki-gateway-9f98ffcc5-5nz6b" podStartSLOduration=2.337535148 podStartE2EDuration="8.781338106s" podCreationTimestamp="2025-12-03 12:33:01 +0000 UTC" firstStartedPulling="2025-12-03 12:33:02.583167232 +0000 UTC m=+729.045015015" lastFinishedPulling="2025-12-03 12:33:09.026970179 +0000 UTC m=+735.488817973" observedRunningTime="2025-12-03 12:33:09.778215603 +0000 UTC m=+736.240063386" watchObservedRunningTime="2025-12-03 12:33:09.781338106 +0000 UTC m=+736.243185888" Dec 03 12:33:19 crc kubenswrapper[4849]: I1203 12:33:19.102230 4849 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 12:33:21 crc kubenswrapper[4849]: I1203 12:33:21.308501 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-distributor-76cc67bf56-skzmr" Dec 03 12:33:21 crc kubenswrapper[4849]: I1203 12:33:21.465415 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-querier-5895d59bb8-cn9h6" Dec 03 12:33:21 crc kubenswrapper[4849]: I1203 12:33:21.514924 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-query-frontend-84558f7c9f-p8jpz" Dec 03 12:33:22 crc kubenswrapper[4849]: I1203 12:33:22.442820 4849 
patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: this instance owns no tokens Dec 03 12:33:22 crc kubenswrapper[4849]: I1203 12:33:22.442864 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="39d1c4cc-1f24-4c05-b7af-87cba182e3e6" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 12:33:22 crc kubenswrapper[4849]: I1203 12:33:22.590970 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-index-gateway-0" Dec 03 12:33:22 crc kubenswrapper[4849]: I1203 12:33:22.677088 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:33:22 crc kubenswrapper[4849]: I1203 12:33:22.677144 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:33:22 crc kubenswrapper[4849]: I1203 12:33:22.677184 4849 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:33:22 crc kubenswrapper[4849]: I1203 12:33:22.677804 4849 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2ef7a020e9553af217991c2be82bd8c7cbc4859d782a4f813dbd020c01097d67"} pod="openshift-machine-config-operator/machine-config-daemon-hszbg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:33:22 crc kubenswrapper[4849]: I1203 12:33:22.677857 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" containerID="cri-o://2ef7a020e9553af217991c2be82bd8c7cbc4859d782a4f813dbd020c01097d67" gracePeriod=600 Dec 03 12:33:22 crc kubenswrapper[4849]: I1203 12:33:22.809797 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-compactor-0" Dec 03 12:33:22 crc kubenswrapper[4849]: I1203 12:33:22.819912 4849 generic.go:334] "Generic (PLEG): container finished" podID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerID="2ef7a020e9553af217991c2be82bd8c7cbc4859d782a4f813dbd020c01097d67" exitCode=0 Dec 03 12:33:22 crc kubenswrapper[4849]: I1203 12:33:22.820003 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerDied","Data":"2ef7a020e9553af217991c2be82bd8c7cbc4859d782a4f813dbd020c01097d67"} Dec 03 12:33:22 crc kubenswrapper[4849]: I1203 12:33:22.820077 4849 scope.go:117] "RemoveContainer" containerID="9a18b9deb4424c2bff6208213fb1170494c2f36474a4b5d0eada48c5afabce44" Dec 03 12:33:23 crc kubenswrapper[4849]: I1203 
12:33:23.826470 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerStarted","Data":"a625210ce9ae49de7cb766c5c6d666ea81b94e9a45eb02fc64652a282b61fdfe"} Dec 03 12:33:32 crc kubenswrapper[4849]: I1203 12:33:32.441764 4849 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: this instance owns no tokens Dec 03 12:33:32 crc kubenswrapper[4849]: I1203 12:33:32.442708 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="39d1c4cc-1f24-4c05-b7af-87cba182e3e6" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 12:33:42 crc kubenswrapper[4849]: I1203 12:33:42.441169 4849 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: waiting for 15s after being ready Dec 03 12:33:42 crc kubenswrapper[4849]: I1203 12:33:42.441527 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="39d1c4cc-1f24-4c05-b7af-87cba182e3e6" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 12:33:52 crc kubenswrapper[4849]: I1203 12:33:52.441276 4849 patch_prober.go:28] interesting pod/logging-loki-ingester-0 container/loki-ingester namespace/openshift-logging: Readiness probe status=failure output="HTTP probe failed with statuscode: 503" start-of-body=Ingester not ready: waiting for 15s after being ready Dec 03 12:33:52 crc kubenswrapper[4849]: I1203 12:33:52.441628 4849 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-logging/logging-loki-ingester-0" podUID="39d1c4cc-1f24-4c05-b7af-87cba182e3e6" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 12:34:02 crc kubenswrapper[4849]: I1203 12:34:02.441244 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-logging/logging-loki-ingester-0" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.335707 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/collector-lc2pm"] Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.338269 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.340388 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.348385 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-config" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.348593 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.348955 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-g4l8d" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.349083 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.350740 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-config-openshift-service-cacrt\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.350838 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-syslog-receiver\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.350876 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-entrypoint\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.350918 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-trusted-ca\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.350945 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/6689d12c-388e-4b36-9922-d389b7aa1ab8-datadir\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.350974 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwxms\" (UniqueName: \"kubernetes.io/projected/6689d12c-388e-4b36-9922-d389b7aa1ab8-kube-api-access-pwxms\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.351009 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: 
\"kubernetes.io/empty-dir/6689d12c-388e-4b36-9922-d389b7aa1ab8-tmp\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.351039 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-metrics\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.351058 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-token\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.351287 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/6689d12c-388e-4b36-9922-d389b7aa1ab8-sa-token\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.351348 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-config\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.354116 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.386131 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-lc2pm"] Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.390815 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/collector-lc2pm"] Dec 03 12:34:21 crc kubenswrapper[4849]: E1203 12:34:21.391328 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[collector-syslog-receiver collector-token config config-openshift-service-cacrt datadir entrypoint kube-api-access-pwxms metrics sa-token tmp trusted-ca], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openshift-logging/collector-lc2pm" podUID="6689d12c-388e-4b36-9922-d389b7aa1ab8" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.453432 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/6689d12c-388e-4b36-9922-d389b7aa1ab8-sa-token\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.453507 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-config\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.453556 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: 
\"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-config-openshift-service-cacrt\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.453627 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-syslog-receiver\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.453682 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-entrypoint\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.453722 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/6689d12c-388e-4b36-9922-d389b7aa1ab8-datadir\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.453739 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-trusted-ca\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.453785 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwxms\" (UniqueName: \"kubernetes.io/projected/6689d12c-388e-4b36-9922-d389b7aa1ab8-kube-api-access-pwxms\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.453828 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/6689d12c-388e-4b36-9922-d389b7aa1ab8-tmp\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.453845 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-metrics\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.453860 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-token\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.456830 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-entrypoint\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 
12:34:21.457582 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-config\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.458039 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-config-openshift-service-cacrt\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: E1203 12:34:21.458115 4849 secret.go:188] Couldn't get secret openshift-logging/collector-syslog-receiver: secret "collector-syslog-receiver" not found Dec 03 12:34:21 crc kubenswrapper[4849]: E1203 12:34:21.458154 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-syslog-receiver podName:6689d12c-388e-4b36-9922-d389b7aa1ab8 nodeName:}" failed. No retries permitted until 2025-12-03 12:34:21.958141022 +0000 UTC m=+808.419988805 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "collector-syslog-receiver" (UniqueName: "kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-syslog-receiver") pod "collector-lc2pm" (UID: "6689d12c-388e-4b36-9922-d389b7aa1ab8") : secret "collector-syslog-receiver" not found Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.459822 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/6689d12c-388e-4b36-9922-d389b7aa1ab8-datadir\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: E1203 12:34:21.463813 4849 secret.go:188] Couldn't get secret openshift-logging/collector-metrics: secret "collector-metrics" not found Dec 03 12:34:21 crc kubenswrapper[4849]: E1203 12:34:21.463868 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-metrics podName:6689d12c-388e-4b36-9922-d389b7aa1ab8 nodeName:}" failed. No retries permitted until 2025-12-03 12:34:21.963855017 +0000 UTC m=+808.425702800 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics" (UniqueName: "kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-metrics") pod "collector-lc2pm" (UID: "6689d12c-388e-4b36-9922-d389b7aa1ab8") : secret "collector-metrics" not found Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.465415 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-token\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.470796 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-trusted-ca\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.473903 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/6689d12c-388e-4b36-9922-d389b7aa1ab8-tmp\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.478577 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/6689d12c-388e-4b36-9922-d389b7aa1ab8-sa-token\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.480466 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwxms\" (UniqueName: \"kubernetes.io/projected/6689d12c-388e-4b36-9922-d389b7aa1ab8-kube-api-access-pwxms\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.961369 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-syslog-receiver\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:21 crc kubenswrapper[4849]: I1203 12:34:21.963947 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-syslog-receiver\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.063454 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-metrics\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.067072 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-metrics\") pod \"collector-lc2pm\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " pod="openshift-logging/collector-lc2pm" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 
12:34:22.095849 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-lc2pm" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.102909 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-lc2pm" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164194 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/6689d12c-388e-4b36-9922-d389b7aa1ab8-tmp\") pod \"6689d12c-388e-4b36-9922-d389b7aa1ab8\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164253 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-syslog-receiver\") pod \"6689d12c-388e-4b36-9922-d389b7aa1ab8\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164276 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-config\") pod \"6689d12c-388e-4b36-9922-d389b7aa1ab8\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164323 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/6689d12c-388e-4b36-9922-d389b7aa1ab8-sa-token\") pod \"6689d12c-388e-4b36-9922-d389b7aa1ab8\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164364 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwxms\" (UniqueName: \"kubernetes.io/projected/6689d12c-388e-4b36-9922-d389b7aa1ab8-kube-api-access-pwxms\") pod \"6689d12c-388e-4b36-9922-d389b7aa1ab8\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164382 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-token\") pod \"6689d12c-388e-4b36-9922-d389b7aa1ab8\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164395 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/6689d12c-388e-4b36-9922-d389b7aa1ab8-datadir\") pod \"6689d12c-388e-4b36-9922-d389b7aa1ab8\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164413 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-config-openshift-service-cacrt\") pod \"6689d12c-388e-4b36-9922-d389b7aa1ab8\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164468 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-metrics\") pod \"6689d12c-388e-4b36-9922-d389b7aa1ab8\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164538 4849 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-trusted-ca\") pod \"6689d12c-388e-4b36-9922-d389b7aa1ab8\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164579 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-entrypoint\") pod \"6689d12c-388e-4b36-9922-d389b7aa1ab8\" (UID: \"6689d12c-388e-4b36-9922-d389b7aa1ab8\") " Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164766 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-config" (OuterVolumeSpecName: "config") pod "6689d12c-388e-4b36-9922-d389b7aa1ab8" (UID: "6689d12c-388e-4b36-9922-d389b7aa1ab8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164882 4849 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.164923 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-config-openshift-service-cacrt" (OuterVolumeSpecName: "config-openshift-service-cacrt") pod "6689d12c-388e-4b36-9922-d389b7aa1ab8" (UID: "6689d12c-388e-4b36-9922-d389b7aa1ab8"). InnerVolumeSpecName "config-openshift-service-cacrt". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.165021 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6689d12c-388e-4b36-9922-d389b7aa1ab8-datadir" (OuterVolumeSpecName: "datadir") pod "6689d12c-388e-4b36-9922-d389b7aa1ab8" (UID: "6689d12c-388e-4b36-9922-d389b7aa1ab8"). InnerVolumeSpecName "datadir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.165083 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "6689d12c-388e-4b36-9922-d389b7aa1ab8" (UID: "6689d12c-388e-4b36-9922-d389b7aa1ab8"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.165594 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-entrypoint" (OuterVolumeSpecName: "entrypoint") pod "6689d12c-388e-4b36-9922-d389b7aa1ab8" (UID: "6689d12c-388e-4b36-9922-d389b7aa1ab8"). InnerVolumeSpecName "entrypoint". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.166796 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6689d12c-388e-4b36-9922-d389b7aa1ab8-tmp" (OuterVolumeSpecName: "tmp") pod "6689d12c-388e-4b36-9922-d389b7aa1ab8" (UID: "6689d12c-388e-4b36-9922-d389b7aa1ab8"). InnerVolumeSpecName "tmp". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.166823 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6689d12c-388e-4b36-9922-d389b7aa1ab8-kube-api-access-pwxms" (OuterVolumeSpecName: "kube-api-access-pwxms") pod "6689d12c-388e-4b36-9922-d389b7aa1ab8" (UID: "6689d12c-388e-4b36-9922-d389b7aa1ab8"). InnerVolumeSpecName "kube-api-access-pwxms". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.167070 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6689d12c-388e-4b36-9922-d389b7aa1ab8-sa-token" (OuterVolumeSpecName: "sa-token") pod "6689d12c-388e-4b36-9922-d389b7aa1ab8" (UID: "6689d12c-388e-4b36-9922-d389b7aa1ab8"). InnerVolumeSpecName "sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.167257 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-token" (OuterVolumeSpecName: "collector-token") pod "6689d12c-388e-4b36-9922-d389b7aa1ab8" (UID: "6689d12c-388e-4b36-9922-d389b7aa1ab8"). InnerVolumeSpecName "collector-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.167513 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-metrics" (OuterVolumeSpecName: "metrics") pod "6689d12c-388e-4b36-9922-d389b7aa1ab8" (UID: "6689d12c-388e-4b36-9922-d389b7aa1ab8"). InnerVolumeSpecName "metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.167577 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-syslog-receiver" (OuterVolumeSpecName: "collector-syslog-receiver") pod "6689d12c-388e-4b36-9922-d389b7aa1ab8" (UID: "6689d12c-388e-4b36-9922-d389b7aa1ab8"). InnerVolumeSpecName "collector-syslog-receiver". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.266011 4849 reconciler_common.go:293] "Volume detached for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/6689d12c-388e-4b36-9922-d389b7aa1ab8-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.266043 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwxms\" (UniqueName: \"kubernetes.io/projected/6689d12c-388e-4b36-9922-d389b7aa1ab8-kube-api-access-pwxms\") on node \"crc\" DevicePath \"\"" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.266056 4849 reconciler_common.go:293] "Volume detached for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-token\") on node \"crc\" DevicePath \"\"" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.266066 4849 reconciler_common.go:293] "Volume detached for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/6689d12c-388e-4b36-9922-d389b7aa1ab8-datadir\") on node \"crc\" DevicePath \"\"" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.266075 4849 reconciler_common.go:293] "Volume detached for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-config-openshift-service-cacrt\") on node \"crc\" DevicePath \"\"" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.266085 4849 reconciler_common.go:293] "Volume detached for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.266092 4849 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.266100 4849 reconciler_common.go:293] "Volume detached for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/6689d12c-388e-4b36-9922-d389b7aa1ab8-entrypoint\") on node \"crc\" DevicePath \"\"" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.266107 4849 reconciler_common.go:293] "Volume detached for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/6689d12c-388e-4b36-9922-d389b7aa1ab8-tmp\") on node \"crc\" DevicePath \"\"" Dec 03 12:34:22 crc kubenswrapper[4849]: I1203 12:34:22.266114 4849 reconciler_common.go:293] "Volume detached for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/6689d12c-388e-4b36-9922-d389b7aa1ab8-collector-syslog-receiver\") on node \"crc\" DevicePath \"\"" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.100903 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-lc2pm" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.130421 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/collector-lc2pm"] Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.134337 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-logging/collector-lc2pm"] Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.143256 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/collector-86rhs"] Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.144335 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.151952 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.152676 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.152983 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-config" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.153135 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.153137 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-g4l8d" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.155272 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.164796 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-86rhs"] Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.178561 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/c71bb35c-e32a-4a28-98d4-8ba714fcd547-sa-token\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.178627 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4jzv\" (UniqueName: \"kubernetes.io/projected/c71bb35c-e32a-4a28-98d4-8ba714fcd547-kube-api-access-x4jzv\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.178660 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/c71bb35c-e32a-4a28-98d4-8ba714fcd547-collector-token\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.178677 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/c71bb35c-e32a-4a28-98d4-8ba714fcd547-config-openshift-service-cacrt\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.178775 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c71bb35c-e32a-4a28-98d4-8ba714fcd547-trusted-ca\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.178846 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/c71bb35c-e32a-4a28-98d4-8ba714fcd547-collector-syslog-receiver\") pod 
\"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.178884 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/c71bb35c-e32a-4a28-98d4-8ba714fcd547-entrypoint\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.178913 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/c71bb35c-e32a-4a28-98d4-8ba714fcd547-metrics\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.179004 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/c71bb35c-e32a-4a28-98d4-8ba714fcd547-tmp\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.179048 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c71bb35c-e32a-4a28-98d4-8ba714fcd547-config\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.179065 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/c71bb35c-e32a-4a28-98d4-8ba714fcd547-datadir\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.279702 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c71bb35c-e32a-4a28-98d4-8ba714fcd547-config\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.279735 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/c71bb35c-e32a-4a28-98d4-8ba714fcd547-datadir\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.279782 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/c71bb35c-e32a-4a28-98d4-8ba714fcd547-sa-token\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.279804 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4jzv\" (UniqueName: \"kubernetes.io/projected/c71bb35c-e32a-4a28-98d4-8ba714fcd547-kube-api-access-x4jzv\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.279819 4849 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/c71bb35c-e32a-4a28-98d4-8ba714fcd547-collector-token\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.279836 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/c71bb35c-e32a-4a28-98d4-8ba714fcd547-config-openshift-service-cacrt\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.279870 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/c71bb35c-e32a-4a28-98d4-8ba714fcd547-datadir\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.279888 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c71bb35c-e32a-4a28-98d4-8ba714fcd547-trusted-ca\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.279937 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/c71bb35c-e32a-4a28-98d4-8ba714fcd547-collector-syslog-receiver\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.279963 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/c71bb35c-e32a-4a28-98d4-8ba714fcd547-entrypoint\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.279992 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/c71bb35c-e32a-4a28-98d4-8ba714fcd547-metrics\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.280056 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/c71bb35c-e32a-4a28-98d4-8ba714fcd547-tmp\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.280680 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c71bb35c-e32a-4a28-98d4-8ba714fcd547-config\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.281285 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/c71bb35c-e32a-4a28-98d4-8ba714fcd547-entrypoint\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 
03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.281461 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/c71bb35c-e32a-4a28-98d4-8ba714fcd547-config-openshift-service-cacrt\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.281487 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c71bb35c-e32a-4a28-98d4-8ba714fcd547-trusted-ca\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.282965 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/c71bb35c-e32a-4a28-98d4-8ba714fcd547-tmp\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.283250 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/c71bb35c-e32a-4a28-98d4-8ba714fcd547-collector-syslog-receiver\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.284022 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/c71bb35c-e32a-4a28-98d4-8ba714fcd547-metrics\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.285261 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/c71bb35c-e32a-4a28-98d4-8ba714fcd547-collector-token\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.293534 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/c71bb35c-e32a-4a28-98d4-8ba714fcd547-sa-token\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.293715 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4jzv\" (UniqueName: \"kubernetes.io/projected/c71bb35c-e32a-4a28-98d4-8ba714fcd547-kube-api-access-x4jzv\") pod \"collector-86rhs\" (UID: \"c71bb35c-e32a-4a28-98d4-8ba714fcd547\") " pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.463907 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-86rhs" Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.812196 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-86rhs"] Dec 03 12:34:23 crc kubenswrapper[4849]: I1203 12:34:23.863225 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6689d12c-388e-4b36-9922-d389b7aa1ab8" path="/var/lib/kubelet/pods/6689d12c-388e-4b36-9922-d389b7aa1ab8/volumes" Dec 03 12:34:24 crc kubenswrapper[4849]: I1203 12:34:24.105816 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-86rhs" event={"ID":"c71bb35c-e32a-4a28-98d4-8ba714fcd547","Type":"ContainerStarted","Data":"93a8d91088ef656a9e65a273b8c05b7147263bff175ac68b6fe3ed794107ab30"} Dec 03 12:34:29 crc kubenswrapper[4849]: I1203 12:34:29.131871 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-86rhs" event={"ID":"c71bb35c-e32a-4a28-98d4-8ba714fcd547","Type":"ContainerStarted","Data":"3424eeeae6a6a397d4e748f39d90d418cbe0e98423467faf17eaa19264423a48"} Dec 03 12:34:29 crc kubenswrapper[4849]: I1203 12:34:29.148594 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/collector-86rhs" podStartSLOduration=1.137908387 podStartE2EDuration="6.148579397s" podCreationTimestamp="2025-12-03 12:34:23 +0000 UTC" firstStartedPulling="2025-12-03 12:34:23.818029572 +0000 UTC m=+810.279877355" lastFinishedPulling="2025-12-03 12:34:28.828700581 +0000 UTC m=+815.290548365" observedRunningTime="2025-12-03 12:34:29.145883618 +0000 UTC m=+815.607731401" watchObservedRunningTime="2025-12-03 12:34:29.148579397 +0000 UTC m=+815.610427180" Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.105344 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t"] Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.106830 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.111918 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.127560 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t"] Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.207932 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03105ea5-dd64-4df6-9158-d45a6686afed-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t\" (UID: \"03105ea5-dd64-4df6-9158-d45a6686afed\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.208171 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcldr\" (UniqueName: \"kubernetes.io/projected/03105ea5-dd64-4df6-9158-d45a6686afed-kube-api-access-hcldr\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t\" (UID: \"03105ea5-dd64-4df6-9158-d45a6686afed\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.208311 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03105ea5-dd64-4df6-9158-d45a6686afed-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t\" (UID: \"03105ea5-dd64-4df6-9158-d45a6686afed\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.309921 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03105ea5-dd64-4df6-9158-d45a6686afed-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t\" (UID: \"03105ea5-dd64-4df6-9158-d45a6686afed\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.310083 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03105ea5-dd64-4df6-9158-d45a6686afed-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t\" (UID: \"03105ea5-dd64-4df6-9158-d45a6686afed\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.310141 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcldr\" (UniqueName: \"kubernetes.io/projected/03105ea5-dd64-4df6-9158-d45a6686afed-kube-api-access-hcldr\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t\" (UID: \"03105ea5-dd64-4df6-9158-d45a6686afed\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.310403 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/03105ea5-dd64-4df6-9158-d45a6686afed-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t\" (UID: \"03105ea5-dd64-4df6-9158-d45a6686afed\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.310470 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03105ea5-dd64-4df6-9158-d45a6686afed-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t\" (UID: \"03105ea5-dd64-4df6-9158-d45a6686afed\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.325479 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcldr\" (UniqueName: \"kubernetes.io/projected/03105ea5-dd64-4df6-9158-d45a6686afed-kube-api-access-hcldr\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t\" (UID: \"03105ea5-dd64-4df6-9158-d45a6686afed\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.427126 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" Dec 03 12:34:54 crc kubenswrapper[4849]: I1203 12:34:54.768470 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t"] Dec 03 12:34:55 crc kubenswrapper[4849]: I1203 12:34:55.263333 4849 generic.go:334] "Generic (PLEG): container finished" podID="03105ea5-dd64-4df6-9158-d45a6686afed" containerID="cbbfc9779bcb89ad08af22da6247ec388ce620fdfbdd72d231cd93d622f18879" exitCode=0 Dec 03 12:34:55 crc kubenswrapper[4849]: I1203 12:34:55.263438 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" event={"ID":"03105ea5-dd64-4df6-9158-d45a6686afed","Type":"ContainerDied","Data":"cbbfc9779bcb89ad08af22da6247ec388ce620fdfbdd72d231cd93d622f18879"} Dec 03 12:34:55 crc kubenswrapper[4849]: I1203 12:34:55.263555 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" event={"ID":"03105ea5-dd64-4df6-9158-d45a6686afed","Type":"ContainerStarted","Data":"25feb0b2485264dad1352e3c80d9062da58358fb5fdb4ff4ed36b266c4491776"} Dec 03 12:34:56 crc kubenswrapper[4849]: I1203 12:34:56.370893 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-w49vr"] Dec 03 12:34:56 crc kubenswrapper[4849]: I1203 12:34:56.372106 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:34:56 crc kubenswrapper[4849]: I1203 12:34:56.378295 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w49vr"] Dec 03 12:34:56 crc kubenswrapper[4849]: I1203 12:34:56.439317 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5ccn\" (UniqueName: \"kubernetes.io/projected/1d75fd87-6cbd-470a-9f1a-76e531b9096b-kube-api-access-m5ccn\") pod \"redhat-operators-w49vr\" (UID: \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\") " pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:34:56 crc kubenswrapper[4849]: I1203 12:34:56.439391 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d75fd87-6cbd-470a-9f1a-76e531b9096b-catalog-content\") pod \"redhat-operators-w49vr\" (UID: \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\") " pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:34:56 crc kubenswrapper[4849]: I1203 12:34:56.439461 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d75fd87-6cbd-470a-9f1a-76e531b9096b-utilities\") pod \"redhat-operators-w49vr\" (UID: \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\") " pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:34:56 crc kubenswrapper[4849]: I1203 12:34:56.541024 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d75fd87-6cbd-470a-9f1a-76e531b9096b-catalog-content\") pod \"redhat-operators-w49vr\" (UID: \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\") " pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:34:56 crc kubenswrapper[4849]: I1203 12:34:56.541171 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d75fd87-6cbd-470a-9f1a-76e531b9096b-utilities\") pod \"redhat-operators-w49vr\" (UID: \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\") " pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:34:56 crc kubenswrapper[4849]: I1203 12:34:56.541255 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5ccn\" (UniqueName: \"kubernetes.io/projected/1d75fd87-6cbd-470a-9f1a-76e531b9096b-kube-api-access-m5ccn\") pod \"redhat-operators-w49vr\" (UID: \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\") " pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:34:56 crc kubenswrapper[4849]: I1203 12:34:56.541462 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d75fd87-6cbd-470a-9f1a-76e531b9096b-catalog-content\") pod \"redhat-operators-w49vr\" (UID: \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\") " pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:34:56 crc kubenswrapper[4849]: I1203 12:34:56.541558 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d75fd87-6cbd-470a-9f1a-76e531b9096b-utilities\") pod \"redhat-operators-w49vr\" (UID: \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\") " pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:34:56 crc kubenswrapper[4849]: I1203 12:34:56.562059 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-m5ccn\" (UniqueName: \"kubernetes.io/projected/1d75fd87-6cbd-470a-9f1a-76e531b9096b-kube-api-access-m5ccn\") pod \"redhat-operators-w49vr\" (UID: \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\") " pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:34:56 crc kubenswrapper[4849]: I1203 12:34:56.684713 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:34:57 crc kubenswrapper[4849]: I1203 12:34:57.045779 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w49vr"] Dec 03 12:34:57 crc kubenswrapper[4849]: I1203 12:34:57.274688 4849 generic.go:334] "Generic (PLEG): container finished" podID="1d75fd87-6cbd-470a-9f1a-76e531b9096b" containerID="1756899b37b1b30ba71388bd919ae6fc23e4de71305546bf99cfb9f6ad1cc0ca" exitCode=0 Dec 03 12:34:57 crc kubenswrapper[4849]: I1203 12:34:57.274750 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w49vr" event={"ID":"1d75fd87-6cbd-470a-9f1a-76e531b9096b","Type":"ContainerDied","Data":"1756899b37b1b30ba71388bd919ae6fc23e4de71305546bf99cfb9f6ad1cc0ca"} Dec 03 12:34:57 crc kubenswrapper[4849]: I1203 12:34:57.274808 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w49vr" event={"ID":"1d75fd87-6cbd-470a-9f1a-76e531b9096b","Type":"ContainerStarted","Data":"fc4aff8e82a5b9b1524a967121a2ba1414d512b9f85b39bcd492dcc3c2f2bf12"} Dec 03 12:34:57 crc kubenswrapper[4849]: I1203 12:34:57.276154 4849 generic.go:334] "Generic (PLEG): container finished" podID="03105ea5-dd64-4df6-9158-d45a6686afed" containerID="c1f731c113b6c5941e8385e785e63e87c2e755cd1f570e0bb3dcdd9bd651e314" exitCode=0 Dec 03 12:34:57 crc kubenswrapper[4849]: I1203 12:34:57.276184 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" event={"ID":"03105ea5-dd64-4df6-9158-d45a6686afed","Type":"ContainerDied","Data":"c1f731c113b6c5941e8385e785e63e87c2e755cd1f570e0bb3dcdd9bd651e314"} Dec 03 12:34:58 crc kubenswrapper[4849]: I1203 12:34:58.288029 4849 generic.go:334] "Generic (PLEG): container finished" podID="03105ea5-dd64-4df6-9158-d45a6686afed" containerID="242c0626e1cb14232fdd7c9dc725264acdf7c7534308a2a54a34ddf53e3fc198" exitCode=0 Dec 03 12:34:58 crc kubenswrapper[4849]: I1203 12:34:58.288112 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" event={"ID":"03105ea5-dd64-4df6-9158-d45a6686afed","Type":"ContainerDied","Data":"242c0626e1cb14232fdd7c9dc725264acdf7c7534308a2a54a34ddf53e3fc198"} Dec 03 12:34:58 crc kubenswrapper[4849]: I1203 12:34:58.289538 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w49vr" event={"ID":"1d75fd87-6cbd-470a-9f1a-76e531b9096b","Type":"ContainerStarted","Data":"0dce924b25cdc99848249a140470b3de866598962e0553c94c779a83bdec0246"} Dec 03 12:34:59 crc kubenswrapper[4849]: I1203 12:34:59.296700 4849 generic.go:334] "Generic (PLEG): container finished" podID="1d75fd87-6cbd-470a-9f1a-76e531b9096b" containerID="0dce924b25cdc99848249a140470b3de866598962e0553c94c779a83bdec0246" exitCode=0 Dec 03 12:34:59 crc kubenswrapper[4849]: I1203 12:34:59.296734 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w49vr" 
event={"ID":"1d75fd87-6cbd-470a-9f1a-76e531b9096b","Type":"ContainerDied","Data":"0dce924b25cdc99848249a140470b3de866598962e0553c94c779a83bdec0246"} Dec 03 12:34:59 crc kubenswrapper[4849]: I1203 12:34:59.521103 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" Dec 03 12:34:59 crc kubenswrapper[4849]: I1203 12:34:59.581727 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03105ea5-dd64-4df6-9158-d45a6686afed-util\") pod \"03105ea5-dd64-4df6-9158-d45a6686afed\" (UID: \"03105ea5-dd64-4df6-9158-d45a6686afed\") " Dec 03 12:34:59 crc kubenswrapper[4849]: I1203 12:34:59.581836 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03105ea5-dd64-4df6-9158-d45a6686afed-bundle\") pod \"03105ea5-dd64-4df6-9158-d45a6686afed\" (UID: \"03105ea5-dd64-4df6-9158-d45a6686afed\") " Dec 03 12:34:59 crc kubenswrapper[4849]: I1203 12:34:59.581864 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcldr\" (UniqueName: \"kubernetes.io/projected/03105ea5-dd64-4df6-9158-d45a6686afed-kube-api-access-hcldr\") pod \"03105ea5-dd64-4df6-9158-d45a6686afed\" (UID: \"03105ea5-dd64-4df6-9158-d45a6686afed\") " Dec 03 12:34:59 crc kubenswrapper[4849]: I1203 12:34:59.582344 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03105ea5-dd64-4df6-9158-d45a6686afed-bundle" (OuterVolumeSpecName: "bundle") pod "03105ea5-dd64-4df6-9158-d45a6686afed" (UID: "03105ea5-dd64-4df6-9158-d45a6686afed"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:34:59 crc kubenswrapper[4849]: I1203 12:34:59.586445 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03105ea5-dd64-4df6-9158-d45a6686afed-kube-api-access-hcldr" (OuterVolumeSpecName: "kube-api-access-hcldr") pod "03105ea5-dd64-4df6-9158-d45a6686afed" (UID: "03105ea5-dd64-4df6-9158-d45a6686afed"). InnerVolumeSpecName "kube-api-access-hcldr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:34:59 crc kubenswrapper[4849]: I1203 12:34:59.591342 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03105ea5-dd64-4df6-9158-d45a6686afed-util" (OuterVolumeSpecName: "util") pod "03105ea5-dd64-4df6-9158-d45a6686afed" (UID: "03105ea5-dd64-4df6-9158-d45a6686afed"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:34:59 crc kubenswrapper[4849]: I1203 12:34:59.683824 4849 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/03105ea5-dd64-4df6-9158-d45a6686afed-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:34:59 crc kubenswrapper[4849]: I1203 12:34:59.683858 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcldr\" (UniqueName: \"kubernetes.io/projected/03105ea5-dd64-4df6-9158-d45a6686afed-kube-api-access-hcldr\") on node \"crc\" DevicePath \"\"" Dec 03 12:34:59 crc kubenswrapper[4849]: I1203 12:34:59.683870 4849 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/03105ea5-dd64-4df6-9158-d45a6686afed-util\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:00 crc kubenswrapper[4849]: I1203 12:35:00.302391 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w49vr" event={"ID":"1d75fd87-6cbd-470a-9f1a-76e531b9096b","Type":"ContainerStarted","Data":"87f38e3895efc9171030dd89f0c4504ea7c325758c6c3bff84bf4751953de2c6"} Dec 03 12:35:00 crc kubenswrapper[4849]: I1203 12:35:00.304260 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" event={"ID":"03105ea5-dd64-4df6-9158-d45a6686afed","Type":"ContainerDied","Data":"25feb0b2485264dad1352e3c80d9062da58358fb5fdb4ff4ed36b266c4491776"} Dec 03 12:35:00 crc kubenswrapper[4849]: I1203 12:35:00.304291 4849 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25feb0b2485264dad1352e3c80d9062da58358fb5fdb4ff4ed36b266c4491776" Dec 03 12:35:00 crc kubenswrapper[4849]: I1203 12:35:00.304332 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t" Dec 03 12:35:00 crc kubenswrapper[4849]: I1203 12:35:00.319013 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-w49vr" podStartSLOduration=1.830082542 podStartE2EDuration="4.319001355s" podCreationTimestamp="2025-12-03 12:34:56 +0000 UTC" firstStartedPulling="2025-12-03 12:34:57.275908811 +0000 UTC m=+843.737756594" lastFinishedPulling="2025-12-03 12:34:59.764827625 +0000 UTC m=+846.226675407" observedRunningTime="2025-12-03 12:35:00.315624224 +0000 UTC m=+846.777472007" watchObservedRunningTime="2025-12-03 12:35:00.319001355 +0000 UTC m=+846.780849138" Dec 03 12:35:05 crc kubenswrapper[4849]: I1203 12:35:05.681777 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-5v46h"] Dec 03 12:35:05 crc kubenswrapper[4849]: E1203 12:35:05.682177 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03105ea5-dd64-4df6-9158-d45a6686afed" containerName="pull" Dec 03 12:35:05 crc kubenswrapper[4849]: I1203 12:35:05.682187 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="03105ea5-dd64-4df6-9158-d45a6686afed" containerName="pull" Dec 03 12:35:05 crc kubenswrapper[4849]: E1203 12:35:05.682209 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03105ea5-dd64-4df6-9158-d45a6686afed" containerName="util" Dec 03 12:35:05 crc kubenswrapper[4849]: I1203 12:35:05.682215 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="03105ea5-dd64-4df6-9158-d45a6686afed" containerName="util" Dec 03 12:35:05 crc kubenswrapper[4849]: E1203 12:35:05.682226 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03105ea5-dd64-4df6-9158-d45a6686afed" containerName="extract" Dec 03 12:35:05 crc kubenswrapper[4849]: I1203 12:35:05.682231 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="03105ea5-dd64-4df6-9158-d45a6686afed" containerName="extract" Dec 03 12:35:05 crc kubenswrapper[4849]: I1203 12:35:05.682342 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="03105ea5-dd64-4df6-9158-d45a6686afed" containerName="extract" Dec 03 12:35:05 crc kubenswrapper[4849]: I1203 12:35:05.682798 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5v46h" Dec 03 12:35:05 crc kubenswrapper[4849]: I1203 12:35:05.684418 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-frdgm" Dec 03 12:35:05 crc kubenswrapper[4849]: I1203 12:35:05.684494 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 03 12:35:05 crc kubenswrapper[4849]: I1203 12:35:05.684570 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 03 12:35:05 crc kubenswrapper[4849]: I1203 12:35:05.690793 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-5v46h"] Dec 03 12:35:05 crc kubenswrapper[4849]: I1203 12:35:05.763569 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5xqc\" (UniqueName: \"kubernetes.io/projected/3f89cce2-ef29-4fce-9144-cffaa419c936-kube-api-access-c5xqc\") pod \"nmstate-operator-5b5b58f5c8-5v46h\" (UID: \"3f89cce2-ef29-4fce-9144-cffaa419c936\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5v46h" Dec 03 12:35:05 crc kubenswrapper[4849]: I1203 12:35:05.864675 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5xqc\" (UniqueName: \"kubernetes.io/projected/3f89cce2-ef29-4fce-9144-cffaa419c936-kube-api-access-c5xqc\") pod \"nmstate-operator-5b5b58f5c8-5v46h\" (UID: \"3f89cce2-ef29-4fce-9144-cffaa419c936\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5v46h" Dec 03 12:35:05 crc kubenswrapper[4849]: I1203 12:35:05.880735 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5xqc\" (UniqueName: \"kubernetes.io/projected/3f89cce2-ef29-4fce-9144-cffaa419c936-kube-api-access-c5xqc\") pod \"nmstate-operator-5b5b58f5c8-5v46h\" (UID: \"3f89cce2-ef29-4fce-9144-cffaa419c936\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5v46h" Dec 03 12:35:06 crc kubenswrapper[4849]: I1203 12:35:06.006619 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5v46h" Dec 03 12:35:06 crc kubenswrapper[4849]: I1203 12:35:06.346875 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-5v46h"] Dec 03 12:35:06 crc kubenswrapper[4849]: W1203 12:35:06.351437 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3f89cce2_ef29_4fce_9144_cffaa419c936.slice/crio-678868d07b18a1b61986b7c84b027ae0af6c62f376720a27fecd0d741f1384d4 WatchSource:0}: Error finding container 678868d07b18a1b61986b7c84b027ae0af6c62f376720a27fecd0d741f1384d4: Status 404 returned error can't find the container with id 678868d07b18a1b61986b7c84b027ae0af6c62f376720a27fecd0d741f1384d4 Dec 03 12:35:06 crc kubenswrapper[4849]: I1203 12:35:06.684904 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:35:06 crc kubenswrapper[4849]: I1203 12:35:06.684947 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:35:06 crc kubenswrapper[4849]: I1203 12:35:06.720153 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:35:07 crc kubenswrapper[4849]: I1203 12:35:07.339097 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5v46h" event={"ID":"3f89cce2-ef29-4fce-9144-cffaa419c936","Type":"ContainerStarted","Data":"678868d07b18a1b61986b7c84b027ae0af6c62f376720a27fecd0d741f1384d4"} Dec 03 12:35:07 crc kubenswrapper[4849]: I1203 12:35:07.365127 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:35:09 crc kubenswrapper[4849]: I1203 12:35:09.349429 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5v46h" event={"ID":"3f89cce2-ef29-4fce-9144-cffaa419c936","Type":"ContainerStarted","Data":"cf1b862b5d33f1f46d63ff28d830feba0d59adefd9a07112de607a24a6c51b05"} Dec 03 12:35:09 crc kubenswrapper[4849]: I1203 12:35:09.362530 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5v46h" podStartSLOduration=2.417316596 podStartE2EDuration="4.362515907s" podCreationTimestamp="2025-12-03 12:35:05 +0000 UTC" firstStartedPulling="2025-12-03 12:35:06.353106615 +0000 UTC m=+852.814954398" lastFinishedPulling="2025-12-03 12:35:08.298305926 +0000 UTC m=+854.760153709" observedRunningTime="2025-12-03 12:35:09.359879238 +0000 UTC m=+855.821727021" watchObservedRunningTime="2025-12-03 12:35:09.362515907 +0000 UTC m=+855.824363690" Dec 03 12:35:09 crc kubenswrapper[4849]: I1203 12:35:09.766488 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w49vr"] Dec 03 12:35:09 crc kubenswrapper[4849]: I1203 12:35:09.766842 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-w49vr" podUID="1d75fd87-6cbd-470a-9f1a-76e531b9096b" containerName="registry-server" containerID="cri-o://87f38e3895efc9171030dd89f0c4504ea7c325758c6c3bff84bf4751953de2c6" gracePeriod=2 Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.104297 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.123543 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d75fd87-6cbd-470a-9f1a-76e531b9096b-catalog-content\") pod \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\" (UID: \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\") " Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.123684 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5ccn\" (UniqueName: \"kubernetes.io/projected/1d75fd87-6cbd-470a-9f1a-76e531b9096b-kube-api-access-m5ccn\") pod \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\" (UID: \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\") " Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.123758 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d75fd87-6cbd-470a-9f1a-76e531b9096b-utilities\") pod \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\" (UID: \"1d75fd87-6cbd-470a-9f1a-76e531b9096b\") " Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.124673 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d75fd87-6cbd-470a-9f1a-76e531b9096b-utilities" (OuterVolumeSpecName: "utilities") pod "1d75fd87-6cbd-470a-9f1a-76e531b9096b" (UID: "1d75fd87-6cbd-470a-9f1a-76e531b9096b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.128618 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d75fd87-6cbd-470a-9f1a-76e531b9096b-kube-api-access-m5ccn" (OuterVolumeSpecName: "kube-api-access-m5ccn") pod "1d75fd87-6cbd-470a-9f1a-76e531b9096b" (UID: "1d75fd87-6cbd-470a-9f1a-76e531b9096b"). InnerVolumeSpecName "kube-api-access-m5ccn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.198203 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d75fd87-6cbd-470a-9f1a-76e531b9096b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d75fd87-6cbd-470a-9f1a-76e531b9096b" (UID: "1d75fd87-6cbd-470a-9f1a-76e531b9096b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.225420 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5ccn\" (UniqueName: \"kubernetes.io/projected/1d75fd87-6cbd-470a-9f1a-76e531b9096b-kube-api-access-m5ccn\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.225450 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d75fd87-6cbd-470a-9f1a-76e531b9096b-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.225460 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d75fd87-6cbd-470a-9f1a-76e531b9096b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.357267 4849 generic.go:334] "Generic (PLEG): container finished" podID="1d75fd87-6cbd-470a-9f1a-76e531b9096b" containerID="87f38e3895efc9171030dd89f0c4504ea7c325758c6c3bff84bf4751953de2c6" exitCode=0 Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.357319 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w49vr" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.357323 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w49vr" event={"ID":"1d75fd87-6cbd-470a-9f1a-76e531b9096b","Type":"ContainerDied","Data":"87f38e3895efc9171030dd89f0c4504ea7c325758c6c3bff84bf4751953de2c6"} Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.357561 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w49vr" event={"ID":"1d75fd87-6cbd-470a-9f1a-76e531b9096b","Type":"ContainerDied","Data":"fc4aff8e82a5b9b1524a967121a2ba1414d512b9f85b39bcd492dcc3c2f2bf12"} Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.357585 4849 scope.go:117] "RemoveContainer" containerID="87f38e3895efc9171030dd89f0c4504ea7c325758c6c3bff84bf4751953de2c6" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.371159 4849 scope.go:117] "RemoveContainer" containerID="0dce924b25cdc99848249a140470b3de866598962e0553c94c779a83bdec0246" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.378415 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w49vr"] Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.382788 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-w49vr"] Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.406055 4849 scope.go:117] "RemoveContainer" containerID="1756899b37b1b30ba71388bd919ae6fc23e4de71305546bf99cfb9f6ad1cc0ca" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.420465 4849 scope.go:117] "RemoveContainer" containerID="87f38e3895efc9171030dd89f0c4504ea7c325758c6c3bff84bf4751953de2c6" Dec 03 12:35:10 crc kubenswrapper[4849]: E1203 12:35:10.420816 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87f38e3895efc9171030dd89f0c4504ea7c325758c6c3bff84bf4751953de2c6\": container with ID starting with 87f38e3895efc9171030dd89f0c4504ea7c325758c6c3bff84bf4751953de2c6 not found: ID does not exist" containerID="87f38e3895efc9171030dd89f0c4504ea7c325758c6c3bff84bf4751953de2c6" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.420844 4849 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87f38e3895efc9171030dd89f0c4504ea7c325758c6c3bff84bf4751953de2c6"} err="failed to get container status \"87f38e3895efc9171030dd89f0c4504ea7c325758c6c3bff84bf4751953de2c6\": rpc error: code = NotFound desc = could not find container \"87f38e3895efc9171030dd89f0c4504ea7c325758c6c3bff84bf4751953de2c6\": container with ID starting with 87f38e3895efc9171030dd89f0c4504ea7c325758c6c3bff84bf4751953de2c6 not found: ID does not exist" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.420865 4849 scope.go:117] "RemoveContainer" containerID="0dce924b25cdc99848249a140470b3de866598962e0553c94c779a83bdec0246" Dec 03 12:35:10 crc kubenswrapper[4849]: E1203 12:35:10.421208 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dce924b25cdc99848249a140470b3de866598962e0553c94c779a83bdec0246\": container with ID starting with 0dce924b25cdc99848249a140470b3de866598962e0553c94c779a83bdec0246 not found: ID does not exist" containerID="0dce924b25cdc99848249a140470b3de866598962e0553c94c779a83bdec0246" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.421249 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dce924b25cdc99848249a140470b3de866598962e0553c94c779a83bdec0246"} err="failed to get container status \"0dce924b25cdc99848249a140470b3de866598962e0553c94c779a83bdec0246\": rpc error: code = NotFound desc = could not find container \"0dce924b25cdc99848249a140470b3de866598962e0553c94c779a83bdec0246\": container with ID starting with 0dce924b25cdc99848249a140470b3de866598962e0553c94c779a83bdec0246 not found: ID does not exist" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.421263 4849 scope.go:117] "RemoveContainer" containerID="1756899b37b1b30ba71388bd919ae6fc23e4de71305546bf99cfb9f6ad1cc0ca" Dec 03 12:35:10 crc kubenswrapper[4849]: E1203 12:35:10.421536 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1756899b37b1b30ba71388bd919ae6fc23e4de71305546bf99cfb9f6ad1cc0ca\": container with ID starting with 1756899b37b1b30ba71388bd919ae6fc23e4de71305546bf99cfb9f6ad1cc0ca not found: ID does not exist" containerID="1756899b37b1b30ba71388bd919ae6fc23e4de71305546bf99cfb9f6ad1cc0ca" Dec 03 12:35:10 crc kubenswrapper[4849]: I1203 12:35:10.421577 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1756899b37b1b30ba71388bd919ae6fc23e4de71305546bf99cfb9f6ad1cc0ca"} err="failed to get container status \"1756899b37b1b30ba71388bd919ae6fc23e4de71305546bf99cfb9f6ad1cc0ca\": rpc error: code = NotFound desc = could not find container \"1756899b37b1b30ba71388bd919ae6fc23e4de71305546bf99cfb9f6ad1cc0ca\": container with ID starting with 1756899b37b1b30ba71388bd919ae6fc23e4de71305546bf99cfb9f6ad1cc0ca not found: ID does not exist" Dec 03 12:35:11 crc kubenswrapper[4849]: I1203 12:35:11.862175 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d75fd87-6cbd-470a-9f1a-76e531b9096b" path="/var/lib/kubelet/pods/1d75fd87-6cbd-470a-9f1a-76e531b9096b/volumes" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.222155 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-j2x7n"] Dec 03 12:35:15 crc kubenswrapper[4849]: E1203 12:35:15.222588 4849 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1d75fd87-6cbd-470a-9f1a-76e531b9096b" containerName="extract-content" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.222604 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d75fd87-6cbd-470a-9f1a-76e531b9096b" containerName="extract-content" Dec 03 12:35:15 crc kubenswrapper[4849]: E1203 12:35:15.222615 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d75fd87-6cbd-470a-9f1a-76e531b9096b" containerName="extract-utilities" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.222622 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d75fd87-6cbd-470a-9f1a-76e531b9096b" containerName="extract-utilities" Dec 03 12:35:15 crc kubenswrapper[4849]: E1203 12:35:15.222634 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d75fd87-6cbd-470a-9f1a-76e531b9096b" containerName="registry-server" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.222657 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d75fd87-6cbd-470a-9f1a-76e531b9096b" containerName="registry-server" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.222808 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d75fd87-6cbd-470a-9f1a-76e531b9096b" containerName="registry-server" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.223441 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-j2x7n" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.225815 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-pt66p" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.233062 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-j2x7n"] Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.255747 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-r2xvh"] Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.260679 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.270367 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w"] Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.274930 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.276228 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.300046 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w"] Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.300815 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7ddd8581-dade-41ec-8f10-6fa2ea56f767-ovs-socket\") pod \"nmstate-handler-r2xvh\" (UID: \"7ddd8581-dade-41ec-8f10-6fa2ea56f767\") " pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.300921 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7ddd8581-dade-41ec-8f10-6fa2ea56f767-nmstate-lock\") pod \"nmstate-handler-r2xvh\" (UID: \"7ddd8581-dade-41ec-8f10-6fa2ea56f767\") " pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.300993 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7ddd8581-dade-41ec-8f10-6fa2ea56f767-dbus-socket\") pod \"nmstate-handler-r2xvh\" (UID: \"7ddd8581-dade-41ec-8f10-6fa2ea56f767\") " pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.301134 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db9r5\" (UniqueName: \"kubernetes.io/projected/1aa532e3-4844-4ff4-a359-8414e22efb83-kube-api-access-db9r5\") pod \"nmstate-metrics-7f946cbc9-j2x7n\" (UID: \"1aa532e3-4844-4ff4-a359-8414e22efb83\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-j2x7n" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.301231 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7tbd\" (UniqueName: \"kubernetes.io/projected/7ddd8581-dade-41ec-8f10-6fa2ea56f767-kube-api-access-s7tbd\") pod \"nmstate-handler-r2xvh\" (UID: \"7ddd8581-dade-41ec-8f10-6fa2ea56f767\") " pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.331499 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr"] Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.332628 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.336754 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.336943 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.337157 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-wvjwr" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.347251 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr"] Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.402393 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7ddd8581-dade-41ec-8f10-6fa2ea56f767-ovs-socket\") pod \"nmstate-handler-r2xvh\" (UID: \"7ddd8581-dade-41ec-8f10-6fa2ea56f767\") " pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.402509 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7ddd8581-dade-41ec-8f10-6fa2ea56f767-ovs-socket\") pod \"nmstate-handler-r2xvh\" (UID: \"7ddd8581-dade-41ec-8f10-6fa2ea56f767\") " pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.402512 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b81e93a-513f-45e6-b647-e4767283c8e5-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-l6hxr\" (UID: \"2b81e93a-513f-45e6-b647-e4767283c8e5\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.402588 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7ddd8581-dade-41ec-8f10-6fa2ea56f767-nmstate-lock\") pod \"nmstate-handler-r2xvh\" (UID: \"7ddd8581-dade-41ec-8f10-6fa2ea56f767\") " pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.402619 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/2b81e93a-513f-45e6-b647-e4767283c8e5-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-l6hxr\" (UID: \"2b81e93a-513f-45e6-b647-e4767283c8e5\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.402665 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhfgb\" (UniqueName: \"kubernetes.io/projected/b34bd184-2a40-4689-817b-0fff0d519a11-kube-api-access-fhfgb\") pod \"nmstate-webhook-5f6d4c5ccb-9pl9w\" (UID: \"b34bd184-2a40-4689-817b-0fff0d519a11\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.402709 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7ddd8581-dade-41ec-8f10-6fa2ea56f767-nmstate-lock\") pod \"nmstate-handler-r2xvh\" (UID: \"7ddd8581-dade-41ec-8f10-6fa2ea56f767\") " 
pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.402823 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7ddd8581-dade-41ec-8f10-6fa2ea56f767-dbus-socket\") pod \"nmstate-handler-r2xvh\" (UID: \"7ddd8581-dade-41ec-8f10-6fa2ea56f767\") " pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.402864 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnm57\" (UniqueName: \"kubernetes.io/projected/2b81e93a-513f-45e6-b647-e4767283c8e5-kube-api-access-jnm57\") pod \"nmstate-console-plugin-7fbb5f6569-l6hxr\" (UID: \"2b81e93a-513f-45e6-b647-e4767283c8e5\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.402923 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db9r5\" (UniqueName: \"kubernetes.io/projected/1aa532e3-4844-4ff4-a359-8414e22efb83-kube-api-access-db9r5\") pod \"nmstate-metrics-7f946cbc9-j2x7n\" (UID: \"1aa532e3-4844-4ff4-a359-8414e22efb83\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-j2x7n" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.403002 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b34bd184-2a40-4689-817b-0fff0d519a11-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-9pl9w\" (UID: \"b34bd184-2a40-4689-817b-0fff0d519a11\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.403015 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7ddd8581-dade-41ec-8f10-6fa2ea56f767-dbus-socket\") pod \"nmstate-handler-r2xvh\" (UID: \"7ddd8581-dade-41ec-8f10-6fa2ea56f767\") " pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.403020 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7tbd\" (UniqueName: \"kubernetes.io/projected/7ddd8581-dade-41ec-8f10-6fa2ea56f767-kube-api-access-s7tbd\") pod \"nmstate-handler-r2xvh\" (UID: \"7ddd8581-dade-41ec-8f10-6fa2ea56f767\") " pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.418813 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db9r5\" (UniqueName: \"kubernetes.io/projected/1aa532e3-4844-4ff4-a359-8414e22efb83-kube-api-access-db9r5\") pod \"nmstate-metrics-7f946cbc9-j2x7n\" (UID: \"1aa532e3-4844-4ff4-a359-8414e22efb83\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-j2x7n" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.422064 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7tbd\" (UniqueName: \"kubernetes.io/projected/7ddd8581-dade-41ec-8f10-6fa2ea56f767-kube-api-access-s7tbd\") pod \"nmstate-handler-r2xvh\" (UID: \"7ddd8581-dade-41ec-8f10-6fa2ea56f767\") " pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.496301 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-58cc767798-k7g2l"] Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.497117 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.506655 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-58cc767798-k7g2l"] Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.507706 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b81e93a-513f-45e6-b647-e4767283c8e5-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-l6hxr\" (UID: \"2b81e93a-513f-45e6-b647-e4767283c8e5\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.507785 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/2b81e93a-513f-45e6-b647-e4767283c8e5-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-l6hxr\" (UID: \"2b81e93a-513f-45e6-b647-e4767283c8e5\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.507822 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhfgb\" (UniqueName: \"kubernetes.io/projected/b34bd184-2a40-4689-817b-0fff0d519a11-kube-api-access-fhfgb\") pod \"nmstate-webhook-5f6d4c5ccb-9pl9w\" (UID: \"b34bd184-2a40-4689-817b-0fff0d519a11\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.507867 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnm57\" (UniqueName: \"kubernetes.io/projected/2b81e93a-513f-45e6-b647-e4767283c8e5-kube-api-access-jnm57\") pod \"nmstate-console-plugin-7fbb5f6569-l6hxr\" (UID: \"2b81e93a-513f-45e6-b647-e4767283c8e5\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.507938 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b34bd184-2a40-4689-817b-0fff0d519a11-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-9pl9w\" (UID: \"b34bd184-2a40-4689-817b-0fff0d519a11\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" Dec 03 12:35:15 crc kubenswrapper[4849]: E1203 12:35:15.508068 4849 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 03 12:35:15 crc kubenswrapper[4849]: E1203 12:35:15.508117 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2b81e93a-513f-45e6-b647-e4767283c8e5-plugin-serving-cert podName:2b81e93a-513f-45e6-b647-e4767283c8e5 nodeName:}" failed. No retries permitted until 2025-12-03 12:35:16.008103451 +0000 UTC m=+862.469951233 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/2b81e93a-513f-45e6-b647-e4767283c8e5-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-l6hxr" (UID: "2b81e93a-513f-45e6-b647-e4767283c8e5") : secret "plugin-serving-cert" not found Dec 03 12:35:15 crc kubenswrapper[4849]: E1203 12:35:15.508892 4849 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 03 12:35:15 crc kubenswrapper[4849]: E1203 12:35:15.508951 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b34bd184-2a40-4689-817b-0fff0d519a11-tls-key-pair podName:b34bd184-2a40-4689-817b-0fff0d519a11 nodeName:}" failed. No retries permitted until 2025-12-03 12:35:16.008936187 +0000 UTC m=+862.470783970 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/b34bd184-2a40-4689-817b-0fff0d519a11-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-9pl9w" (UID: "b34bd184-2a40-4689-817b-0fff0d519a11") : secret "openshift-nmstate-webhook" not found Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.508891 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/2b81e93a-513f-45e6-b647-e4767283c8e5-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-l6hxr\" (UID: \"2b81e93a-513f-45e6-b647-e4767283c8e5\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.524978 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnm57\" (UniqueName: \"kubernetes.io/projected/2b81e93a-513f-45e6-b647-e4767283c8e5-kube-api-access-jnm57\") pod \"nmstate-console-plugin-7fbb5f6569-l6hxr\" (UID: \"2b81e93a-513f-45e6-b647-e4767283c8e5\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.529982 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhfgb\" (UniqueName: \"kubernetes.io/projected/b34bd184-2a40-4689-817b-0fff0d519a11-kube-api-access-fhfgb\") pod \"nmstate-webhook-5f6d4c5ccb-9pl9w\" (UID: \"b34bd184-2a40-4689-817b-0fff0d519a11\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.539116 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-j2x7n" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.590813 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.612272 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/bb10361f-3f90-4e9a-9544-b4d230e628c2-service-ca\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.612307 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb10361f-3f90-4e9a-9544-b4d230e628c2-console-serving-cert\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.612386 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvv6r\" (UniqueName: \"kubernetes.io/projected/bb10361f-3f90-4e9a-9544-b4d230e628c2-kube-api-access-fvv6r\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.612504 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/bb10361f-3f90-4e9a-9544-b4d230e628c2-oauth-serving-cert\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.612520 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb10361f-3f90-4e9a-9544-b4d230e628c2-trusted-ca-bundle\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.612538 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bb10361f-3f90-4e9a-9544-b4d230e628c2-console-oauth-config\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.612551 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bb10361f-3f90-4e9a-9544-b4d230e628c2-console-config\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.714120 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvv6r\" (UniqueName: \"kubernetes.io/projected/bb10361f-3f90-4e9a-9544-b4d230e628c2-kube-api-access-fvv6r\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.715259 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/bb10361f-3f90-4e9a-9544-b4d230e628c2-oauth-serving-cert\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.715322 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb10361f-3f90-4e9a-9544-b4d230e628c2-trusted-ca-bundle\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.715346 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bb10361f-3f90-4e9a-9544-b4d230e628c2-console-oauth-config\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.715475 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bb10361f-3f90-4e9a-9544-b4d230e628c2-console-config\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.715934 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/bb10361f-3f90-4e9a-9544-b4d230e628c2-service-ca\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.715965 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb10361f-3f90-4e9a-9544-b4d230e628c2-console-serving-cert\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.718849 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/bb10361f-3f90-4e9a-9544-b4d230e628c2-service-ca\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.719088 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/bb10361f-3f90-4e9a-9544-b4d230e628c2-console-config\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.719296 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/bb10361f-3f90-4e9a-9544-b4d230e628c2-oauth-serving-cert\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.719705 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/bb10361f-3f90-4e9a-9544-b4d230e628c2-trusted-ca-bundle\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.723355 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/bb10361f-3f90-4e9a-9544-b4d230e628c2-console-oauth-config\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.723936 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb10361f-3f90-4e9a-9544-b4d230e628c2-console-serving-cert\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.727608 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvv6r\" (UniqueName: \"kubernetes.io/projected/bb10361f-3f90-4e9a-9544-b4d230e628c2-kube-api-access-fvv6r\") pod \"console-58cc767798-k7g2l\" (UID: \"bb10361f-3f90-4e9a-9544-b4d230e628c2\") " pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.825810 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:15 crc kubenswrapper[4849]: I1203 12:35:15.920801 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-j2x7n"] Dec 03 12:35:15 crc kubenswrapper[4849]: W1203 12:35:15.925350 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1aa532e3_4844_4ff4_a359_8414e22efb83.slice/crio-8a89a24b1bd05607fa54183197acd4eff6988ae04686279fcb089c4627f91e32 WatchSource:0}: Error finding container 8a89a24b1bd05607fa54183197acd4eff6988ae04686279fcb089c4627f91e32: Status 404 returned error can't find the container with id 8a89a24b1bd05607fa54183197acd4eff6988ae04686279fcb089c4627f91e32 Dec 03 12:35:16 crc kubenswrapper[4849]: I1203 12:35:16.018800 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b81e93a-513f-45e6-b647-e4767283c8e5-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-l6hxr\" (UID: \"2b81e93a-513f-45e6-b647-e4767283c8e5\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" Dec 03 12:35:16 crc kubenswrapper[4849]: I1203 12:35:16.019014 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b34bd184-2a40-4689-817b-0fff0d519a11-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-9pl9w\" (UID: \"b34bd184-2a40-4689-817b-0fff0d519a11\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" Dec 03 12:35:16 crc kubenswrapper[4849]: I1203 12:35:16.021389 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b81e93a-513f-45e6-b647-e4767283c8e5-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-l6hxr\" (UID: \"2b81e93a-513f-45e6-b647-e4767283c8e5\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" Dec 03 12:35:16 crc 
kubenswrapper[4849]: I1203 12:35:16.021711 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/b34bd184-2a40-4689-817b-0fff0d519a11-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-9pl9w\" (UID: \"b34bd184-2a40-4689-817b-0fff0d519a11\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" Dec 03 12:35:16 crc kubenswrapper[4849]: I1203 12:35:16.199120 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" Dec 03 12:35:16 crc kubenswrapper[4849]: I1203 12:35:16.233187 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-58cc767798-k7g2l"] Dec 03 12:35:16 crc kubenswrapper[4849]: W1203 12:35:16.236924 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb10361f_3f90_4e9a_9544_b4d230e628c2.slice/crio-aa97e436d88de6d0e121cf8b4c378b695e810d96d12c01546fddc3eeffb84548 WatchSource:0}: Error finding container aa97e436d88de6d0e121cf8b4c378b695e810d96d12c01546fddc3eeffb84548: Status 404 returned error can't find the container with id aa97e436d88de6d0e121cf8b4c378b695e810d96d12c01546fddc3eeffb84548 Dec 03 12:35:16 crc kubenswrapper[4849]: I1203 12:35:16.250859 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" Dec 03 12:35:16 crc kubenswrapper[4849]: I1203 12:35:16.390961 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-j2x7n" event={"ID":"1aa532e3-4844-4ff4-a359-8414e22efb83","Type":"ContainerStarted","Data":"8a89a24b1bd05607fa54183197acd4eff6988ae04686279fcb089c4627f91e32"} Dec 03 12:35:16 crc kubenswrapper[4849]: I1203 12:35:16.392195 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-r2xvh" event={"ID":"7ddd8581-dade-41ec-8f10-6fa2ea56f767","Type":"ContainerStarted","Data":"03182f93b4cbbf883c2c1618ed56b3bb04c9c427e856a0b0453a708f34205b9e"} Dec 03 12:35:16 crc kubenswrapper[4849]: I1203 12:35:16.394629 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-58cc767798-k7g2l" event={"ID":"bb10361f-3f90-4e9a-9544-b4d230e628c2","Type":"ContainerStarted","Data":"29a2a2cb5205a0fef7a3a68594e903284c3acf83e0d2474289e9fbdc167e4035"} Dec 03 12:35:16 crc kubenswrapper[4849]: I1203 12:35:16.394675 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-58cc767798-k7g2l" event={"ID":"bb10361f-3f90-4e9a-9544-b4d230e628c2","Type":"ContainerStarted","Data":"aa97e436d88de6d0e121cf8b4c378b695e810d96d12c01546fddc3eeffb84548"} Dec 03 12:35:16 crc kubenswrapper[4849]: I1203 12:35:16.412834 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-58cc767798-k7g2l" podStartSLOduration=1.4128237289999999 podStartE2EDuration="1.412823729s" podCreationTimestamp="2025-12-03 12:35:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:35:16.409043189 +0000 UTC m=+862.870890972" watchObservedRunningTime="2025-12-03 12:35:16.412823729 +0000 UTC m=+862.874671512" Dec 03 12:35:16 crc kubenswrapper[4849]: I1203 12:35:16.561107 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w"] Dec 03 12:35:16 crc kubenswrapper[4849]: 
I1203 12:35:16.635161 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr"] Dec 03 12:35:16 crc kubenswrapper[4849]: W1203 12:35:16.636996 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b81e93a_513f_45e6_b647_e4767283c8e5.slice/crio-9326935e9c04cdef417630519370dedead0b44f3bc8ea75dbdc503d2bcc46d68 WatchSource:0}: Error finding container 9326935e9c04cdef417630519370dedead0b44f3bc8ea75dbdc503d2bcc46d68: Status 404 returned error can't find the container with id 9326935e9c04cdef417630519370dedead0b44f3bc8ea75dbdc503d2bcc46d68 Dec 03 12:35:17 crc kubenswrapper[4849]: I1203 12:35:17.401167 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" event={"ID":"2b81e93a-513f-45e6-b647-e4767283c8e5","Type":"ContainerStarted","Data":"9326935e9c04cdef417630519370dedead0b44f3bc8ea75dbdc503d2bcc46d68"} Dec 03 12:35:17 crc kubenswrapper[4849]: I1203 12:35:17.402205 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" event={"ID":"b34bd184-2a40-4689-817b-0fff0d519a11","Type":"ContainerStarted","Data":"2a23f07613f56ca34e7b32ce02fabdba721846b18a69a3fefa96fdbcecc9c566"} Dec 03 12:35:19 crc kubenswrapper[4849]: I1203 12:35:19.420884 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-j2x7n" event={"ID":"1aa532e3-4844-4ff4-a359-8414e22efb83","Type":"ContainerStarted","Data":"4171f6483ffedce6d32365a48b2d00adb85967fc6cfe185f93f692d635bf26b8"} Dec 03 12:35:19 crc kubenswrapper[4849]: I1203 12:35:19.422757 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-r2xvh" event={"ID":"7ddd8581-dade-41ec-8f10-6fa2ea56f767","Type":"ContainerStarted","Data":"c95e5a7a91795f3b301b4025d568670c6d77582e933de12e34510cc7cecaf750"} Dec 03 12:35:19 crc kubenswrapper[4849]: I1203 12:35:19.422967 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:19 crc kubenswrapper[4849]: I1203 12:35:19.425099 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" event={"ID":"b34bd184-2a40-4689-817b-0fff0d519a11","Type":"ContainerStarted","Data":"d834eb57fcfef1f51f4d6bbd26568d5bfbeeb91d1aace124ca1c52ae4a3b7ec5"} Dec 03 12:35:19 crc kubenswrapper[4849]: I1203 12:35:19.425832 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" Dec 03 12:35:19 crc kubenswrapper[4849]: I1203 12:35:19.438036 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-r2xvh" podStartSLOduration=1.665123972 podStartE2EDuration="4.438023935s" podCreationTimestamp="2025-12-03 12:35:15 +0000 UTC" firstStartedPulling="2025-12-03 12:35:15.621991117 +0000 UTC m=+862.083838901" lastFinishedPulling="2025-12-03 12:35:18.39489108 +0000 UTC m=+864.856738864" observedRunningTime="2025-12-03 12:35:19.433705133 +0000 UTC m=+865.895552916" watchObservedRunningTime="2025-12-03 12:35:19.438023935 +0000 UTC m=+865.899871718" Dec 03 12:35:19 crc kubenswrapper[4849]: I1203 12:35:19.455527 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" podStartSLOduration=2.408564682 
podStartE2EDuration="4.45550527s" podCreationTimestamp="2025-12-03 12:35:15 +0000 UTC" firstStartedPulling="2025-12-03 12:35:16.567919628 +0000 UTC m=+863.029767411" lastFinishedPulling="2025-12-03 12:35:18.614860216 +0000 UTC m=+865.076707999" observedRunningTime="2025-12-03 12:35:19.450698791 +0000 UTC m=+865.912546573" watchObservedRunningTime="2025-12-03 12:35:19.45550527 +0000 UTC m=+865.917353052" Dec 03 12:35:20 crc kubenswrapper[4849]: I1203 12:35:20.432219 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" event={"ID":"2b81e93a-513f-45e6-b647-e4767283c8e5","Type":"ContainerStarted","Data":"4546dbfbfaaa7ad82d71194202ca3941c7bc73263a16e810171644b52fa699a7"} Dec 03 12:35:20 crc kubenswrapper[4849]: I1203 12:35:20.448383 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-l6hxr" podStartSLOduration=2.720377359 podStartE2EDuration="5.4483679s" podCreationTimestamp="2025-12-03 12:35:15 +0000 UTC" firstStartedPulling="2025-12-03 12:35:16.638698539 +0000 UTC m=+863.100546312" lastFinishedPulling="2025-12-03 12:35:19.366689069 +0000 UTC m=+865.828536853" observedRunningTime="2025-12-03 12:35:20.443690163 +0000 UTC m=+866.905537946" watchObservedRunningTime="2025-12-03 12:35:20.4483679 +0000 UTC m=+866.910215683" Dec 03 12:35:21 crc kubenswrapper[4849]: I1203 12:35:21.439177 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-j2x7n" event={"ID":"1aa532e3-4844-4ff4-a359-8414e22efb83","Type":"ContainerStarted","Data":"f184864e8d086e376c9fcc9a76ef3d887c69d2aa13b294182dde02b88f245474"} Dec 03 12:35:21 crc kubenswrapper[4849]: I1203 12:35:21.455005 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-j2x7n" podStartSLOduration=1.779969642 podStartE2EDuration="6.454992069s" podCreationTimestamp="2025-12-03 12:35:15 +0000 UTC" firstStartedPulling="2025-12-03 12:35:15.927324091 +0000 UTC m=+862.389171874" lastFinishedPulling="2025-12-03 12:35:20.602346518 +0000 UTC m=+867.064194301" observedRunningTime="2025-12-03 12:35:21.451916105 +0000 UTC m=+867.913763887" watchObservedRunningTime="2025-12-03 12:35:21.454992069 +0000 UTC m=+867.916839852" Dec 03 12:35:22 crc kubenswrapper[4849]: I1203 12:35:22.677134 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:35:22 crc kubenswrapper[4849]: I1203 12:35:22.678008 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:35:25 crc kubenswrapper[4849]: I1203 12:35:25.624771 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-r2xvh" Dec 03 12:35:25 crc kubenswrapper[4849]: I1203 12:35:25.826971 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:25 crc kubenswrapper[4849]: I1203 12:35:25.827187 4849 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:25 crc kubenswrapper[4849]: I1203 12:35:25.831347 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:26 crc kubenswrapper[4849]: I1203 12:35:26.468569 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-58cc767798-k7g2l" Dec 03 12:35:26 crc kubenswrapper[4849]: I1203 12:35:26.512326 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-5ccf89d679-frx2r"] Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.174639 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vkmfv"] Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.176023 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.181459 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vkmfv"] Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.236070 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12045ba2-3133-4d9b-b80e-47f234107856-utilities\") pod \"redhat-marketplace-vkmfv\" (UID: \"12045ba2-3133-4d9b-b80e-47f234107856\") " pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.236112 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq95c\" (UniqueName: \"kubernetes.io/projected/12045ba2-3133-4d9b-b80e-47f234107856-kube-api-access-cq95c\") pod \"redhat-marketplace-vkmfv\" (UID: \"12045ba2-3133-4d9b-b80e-47f234107856\") " pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.236326 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12045ba2-3133-4d9b-b80e-47f234107856-catalog-content\") pod \"redhat-marketplace-vkmfv\" (UID: \"12045ba2-3133-4d9b-b80e-47f234107856\") " pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.337664 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12045ba2-3133-4d9b-b80e-47f234107856-catalog-content\") pod \"redhat-marketplace-vkmfv\" (UID: \"12045ba2-3133-4d9b-b80e-47f234107856\") " pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.337837 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12045ba2-3133-4d9b-b80e-47f234107856-utilities\") pod \"redhat-marketplace-vkmfv\" (UID: \"12045ba2-3133-4d9b-b80e-47f234107856\") " pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.337860 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq95c\" (UniqueName: \"kubernetes.io/projected/12045ba2-3133-4d9b-b80e-47f234107856-kube-api-access-cq95c\") pod \"redhat-marketplace-vkmfv\" (UID: \"12045ba2-3133-4d9b-b80e-47f234107856\") " 
pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.338108 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12045ba2-3133-4d9b-b80e-47f234107856-catalog-content\") pod \"redhat-marketplace-vkmfv\" (UID: \"12045ba2-3133-4d9b-b80e-47f234107856\") " pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.338257 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12045ba2-3133-4d9b-b80e-47f234107856-utilities\") pod \"redhat-marketplace-vkmfv\" (UID: \"12045ba2-3133-4d9b-b80e-47f234107856\") " pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.353530 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq95c\" (UniqueName: \"kubernetes.io/projected/12045ba2-3133-4d9b-b80e-47f234107856-kube-api-access-cq95c\") pod \"redhat-marketplace-vkmfv\" (UID: \"12045ba2-3133-4d9b-b80e-47f234107856\") " pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.494430 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:31 crc kubenswrapper[4849]: I1203 12:35:31.876084 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vkmfv"] Dec 03 12:35:31 crc kubenswrapper[4849]: W1203 12:35:31.878820 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12045ba2_3133_4d9b_b80e_47f234107856.slice/crio-748284c49623178a9dade6782c8db44e19a8dd977b162f51c9a52305207e082c WatchSource:0}: Error finding container 748284c49623178a9dade6782c8db44e19a8dd977b162f51c9a52305207e082c: Status 404 returned error can't find the container with id 748284c49623178a9dade6782c8db44e19a8dd977b162f51c9a52305207e082c Dec 03 12:35:32 crc kubenswrapper[4849]: I1203 12:35:32.497187 4849 generic.go:334] "Generic (PLEG): container finished" podID="12045ba2-3133-4d9b-b80e-47f234107856" containerID="f60118e775b6e606471c30973daa51031dad90801d5d5247cc5a0c3a5c455c02" exitCode=0 Dec 03 12:35:32 crc kubenswrapper[4849]: I1203 12:35:32.497380 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vkmfv" event={"ID":"12045ba2-3133-4d9b-b80e-47f234107856","Type":"ContainerDied","Data":"f60118e775b6e606471c30973daa51031dad90801d5d5247cc5a0c3a5c455c02"} Dec 03 12:35:32 crc kubenswrapper[4849]: I1203 12:35:32.497431 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vkmfv" event={"ID":"12045ba2-3133-4d9b-b80e-47f234107856","Type":"ContainerStarted","Data":"748284c49623178a9dade6782c8db44e19a8dd977b162f51c9a52305207e082c"} Dec 03 12:35:33 crc kubenswrapper[4849]: I1203 12:35:33.503654 4849 generic.go:334] "Generic (PLEG): container finished" podID="12045ba2-3133-4d9b-b80e-47f234107856" containerID="d1c94409c418d72863f5feca4e7d1a2521fe21334f7cfcc7793e72413eaec52c" exitCode=0 Dec 03 12:35:33 crc kubenswrapper[4849]: I1203 12:35:33.503733 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vkmfv" 
event={"ID":"12045ba2-3133-4d9b-b80e-47f234107856","Type":"ContainerDied","Data":"d1c94409c418d72863f5feca4e7d1a2521fe21334f7cfcc7793e72413eaec52c"} Dec 03 12:35:34 crc kubenswrapper[4849]: I1203 12:35:34.510513 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vkmfv" event={"ID":"12045ba2-3133-4d9b-b80e-47f234107856","Type":"ContainerStarted","Data":"84fb43843886b6dd584e5bb346df01001a2ce3cf6f323f91b3143a967ab0e612"} Dec 03 12:35:34 crc kubenswrapper[4849]: I1203 12:35:34.523206 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vkmfv" podStartSLOduration=2.088851769 podStartE2EDuration="3.523188726s" podCreationTimestamp="2025-12-03 12:35:31 +0000 UTC" firstStartedPulling="2025-12-03 12:35:32.499190492 +0000 UTC m=+878.961038275" lastFinishedPulling="2025-12-03 12:35:33.93352745 +0000 UTC m=+880.395375232" observedRunningTime="2025-12-03 12:35:34.522764106 +0000 UTC m=+880.984611889" watchObservedRunningTime="2025-12-03 12:35:34.523188726 +0000 UTC m=+880.985036499" Dec 03 12:35:36 crc kubenswrapper[4849]: I1203 12:35:36.203910 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-9pl9w" Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.171693 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qcvbt"] Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.173093 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.180144 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qcvbt"] Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.251352 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-utilities\") pod \"certified-operators-qcvbt\" (UID: \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\") " pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.251432 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-catalog-content\") pod \"certified-operators-qcvbt\" (UID: \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\") " pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.251670 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nd62g\" (UniqueName: \"kubernetes.io/projected/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-kube-api-access-nd62g\") pod \"certified-operators-qcvbt\" (UID: \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\") " pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.353669 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nd62g\" (UniqueName: \"kubernetes.io/projected/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-kube-api-access-nd62g\") pod \"certified-operators-qcvbt\" (UID: \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\") " pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.354039 
4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-utilities\") pod \"certified-operators-qcvbt\" (UID: \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\") " pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.354204 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-catalog-content\") pod \"certified-operators-qcvbt\" (UID: \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\") " pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.354463 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-utilities\") pod \"certified-operators-qcvbt\" (UID: \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\") " pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.354622 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-catalog-content\") pod \"certified-operators-qcvbt\" (UID: \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\") " pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.378108 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nd62g\" (UniqueName: \"kubernetes.io/projected/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-kube-api-access-nd62g\") pod \"certified-operators-qcvbt\" (UID: \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\") " pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.487141 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:39 crc kubenswrapper[4849]: I1203 12:35:39.894623 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qcvbt"] Dec 03 12:35:40 crc kubenswrapper[4849]: I1203 12:35:40.553579 4849 generic.go:334] "Generic (PLEG): container finished" podID="71cc5f0f-289e-4f3a-9632-f9d7124a62c4" containerID="2bc31ca748ec7af5fda225f2d8fbe1fa966fbd0b8afa61150cceb1d20b0e3c94" exitCode=0 Dec 03 12:35:40 crc kubenswrapper[4849]: I1203 12:35:40.553668 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qcvbt" event={"ID":"71cc5f0f-289e-4f3a-9632-f9d7124a62c4","Type":"ContainerDied","Data":"2bc31ca748ec7af5fda225f2d8fbe1fa966fbd0b8afa61150cceb1d20b0e3c94"} Dec 03 12:35:40 crc kubenswrapper[4849]: I1203 12:35:40.553823 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qcvbt" event={"ID":"71cc5f0f-289e-4f3a-9632-f9d7124a62c4","Type":"ContainerStarted","Data":"11309d4f694f2be6a1dc31a299faabb09f6c78aba2037533f0a61ccf7e848123"} Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.494678 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.494900 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.526431 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.588960 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.774815 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n8fwv"] Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.778628 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.782714 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n8fwv"] Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.888303 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72cc2472-992b-46c7-aee8-a64202fd596a-catalog-content\") pod \"community-operators-n8fwv\" (UID: \"72cc2472-992b-46c7-aee8-a64202fd596a\") " pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.888385 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7m5q\" (UniqueName: \"kubernetes.io/projected/72cc2472-992b-46c7-aee8-a64202fd596a-kube-api-access-c7m5q\") pod \"community-operators-n8fwv\" (UID: \"72cc2472-992b-46c7-aee8-a64202fd596a\") " pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.888490 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72cc2472-992b-46c7-aee8-a64202fd596a-utilities\") pod \"community-operators-n8fwv\" (UID: \"72cc2472-992b-46c7-aee8-a64202fd596a\") " pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.989490 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72cc2472-992b-46c7-aee8-a64202fd596a-utilities\") pod \"community-operators-n8fwv\" (UID: \"72cc2472-992b-46c7-aee8-a64202fd596a\") " pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.989835 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72cc2472-992b-46c7-aee8-a64202fd596a-catalog-content\") pod \"community-operators-n8fwv\" (UID: \"72cc2472-992b-46c7-aee8-a64202fd596a\") " pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.989878 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7m5q\" (UniqueName: \"kubernetes.io/projected/72cc2472-992b-46c7-aee8-a64202fd596a-kube-api-access-c7m5q\") pod \"community-operators-n8fwv\" (UID: \"72cc2472-992b-46c7-aee8-a64202fd596a\") " pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.990005 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72cc2472-992b-46c7-aee8-a64202fd596a-utilities\") pod \"community-operators-n8fwv\" (UID: \"72cc2472-992b-46c7-aee8-a64202fd596a\") " pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:41 crc kubenswrapper[4849]: I1203 12:35:41.990255 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72cc2472-992b-46c7-aee8-a64202fd596a-catalog-content\") pod \"community-operators-n8fwv\" (UID: \"72cc2472-992b-46c7-aee8-a64202fd596a\") " pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:42 crc kubenswrapper[4849]: I1203 12:35:42.006610 4849 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-c7m5q\" (UniqueName: \"kubernetes.io/projected/72cc2472-992b-46c7-aee8-a64202fd596a-kube-api-access-c7m5q\") pod \"community-operators-n8fwv\" (UID: \"72cc2472-992b-46c7-aee8-a64202fd596a\") " pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:42 crc kubenswrapper[4849]: I1203 12:35:42.112686 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:42 crc kubenswrapper[4849]: I1203 12:35:42.479296 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n8fwv"] Dec 03 12:35:42 crc kubenswrapper[4849]: W1203 12:35:42.568454 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72cc2472_992b_46c7_aee8_a64202fd596a.slice/crio-3accb7c5776137851543dd0580dcecfc1401a6719a893754f79796c9433a9863 WatchSource:0}: Error finding container 3accb7c5776137851543dd0580dcecfc1401a6719a893754f79796c9433a9863: Status 404 returned error can't find the container with id 3accb7c5776137851543dd0580dcecfc1401a6719a893754f79796c9433a9863 Dec 03 12:35:42 crc kubenswrapper[4849]: I1203 12:35:42.572278 4849 generic.go:334] "Generic (PLEG): container finished" podID="71cc5f0f-289e-4f3a-9632-f9d7124a62c4" containerID="055415ea35f76ee94d9640c3933cec46debf2dd526bc0b3c468a37c10e7c590c" exitCode=0 Dec 03 12:35:42 crc kubenswrapper[4849]: I1203 12:35:42.572361 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qcvbt" event={"ID":"71cc5f0f-289e-4f3a-9632-f9d7124a62c4","Type":"ContainerDied","Data":"055415ea35f76ee94d9640c3933cec46debf2dd526bc0b3c468a37c10e7c590c"} Dec 03 12:35:43 crc kubenswrapper[4849]: I1203 12:35:43.579501 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qcvbt" event={"ID":"71cc5f0f-289e-4f3a-9632-f9d7124a62c4","Type":"ContainerStarted","Data":"6ab66cc8b58d9d2d7aebd77afe841ed21ba101ba90d72a1669325a3d546ced31"} Dec 03 12:35:43 crc kubenswrapper[4849]: I1203 12:35:43.582418 4849 generic.go:334] "Generic (PLEG): container finished" podID="72cc2472-992b-46c7-aee8-a64202fd596a" containerID="4fb36a5dcb1db2d25a4c86ad42b60415fe5c53932c55c37738e97ae22d2647e5" exitCode=0 Dec 03 12:35:43 crc kubenswrapper[4849]: I1203 12:35:43.582462 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8fwv" event={"ID":"72cc2472-992b-46c7-aee8-a64202fd596a","Type":"ContainerDied","Data":"4fb36a5dcb1db2d25a4c86ad42b60415fe5c53932c55c37738e97ae22d2647e5"} Dec 03 12:35:43 crc kubenswrapper[4849]: I1203 12:35:43.582489 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8fwv" event={"ID":"72cc2472-992b-46c7-aee8-a64202fd596a","Type":"ContainerStarted","Data":"3accb7c5776137851543dd0580dcecfc1401a6719a893754f79796c9433a9863"} Dec 03 12:35:43 crc kubenswrapper[4849]: I1203 12:35:43.597989 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qcvbt" podStartSLOduration=1.982897017 podStartE2EDuration="4.597974192s" podCreationTimestamp="2025-12-03 12:35:39 +0000 UTC" firstStartedPulling="2025-12-03 12:35:40.555613255 +0000 UTC m=+887.017461038" lastFinishedPulling="2025-12-03 12:35:43.17069043 +0000 UTC m=+889.632538213" observedRunningTime="2025-12-03 12:35:43.596434507 +0000 UTC 
m=+890.058282290" watchObservedRunningTime="2025-12-03 12:35:43.597974192 +0000 UTC m=+890.059821976" Dec 03 12:35:44 crc kubenswrapper[4849]: I1203 12:35:44.589212 4849 generic.go:334] "Generic (PLEG): container finished" podID="72cc2472-992b-46c7-aee8-a64202fd596a" containerID="fdaa73608249b722de296406f724214cc7df11996241f1a5b0555c80e0fa31c8" exitCode=0 Dec 03 12:35:44 crc kubenswrapper[4849]: I1203 12:35:44.589288 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8fwv" event={"ID":"72cc2472-992b-46c7-aee8-a64202fd596a","Type":"ContainerDied","Data":"fdaa73608249b722de296406f724214cc7df11996241f1a5b0555c80e0fa31c8"} Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.367854 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vkmfv"] Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.368347 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vkmfv" podUID="12045ba2-3133-4d9b-b80e-47f234107856" containerName="registry-server" containerID="cri-o://84fb43843886b6dd584e5bb346df01001a2ce3cf6f323f91b3143a967ab0e612" gracePeriod=2 Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.599226 4849 generic.go:334] "Generic (PLEG): container finished" podID="12045ba2-3133-4d9b-b80e-47f234107856" containerID="84fb43843886b6dd584e5bb346df01001a2ce3cf6f323f91b3143a967ab0e612" exitCode=0 Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.599382 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vkmfv" event={"ID":"12045ba2-3133-4d9b-b80e-47f234107856","Type":"ContainerDied","Data":"84fb43843886b6dd584e5bb346df01001a2ce3cf6f323f91b3143a967ab0e612"} Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.601029 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8fwv" event={"ID":"72cc2472-992b-46c7-aee8-a64202fd596a","Type":"ContainerStarted","Data":"473eb0181f8055c56a8322ec5275cf412f1dba7d8879e72345993c7e9fde68ea"} Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.614580 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n8fwv" podStartSLOduration=3.05121052 podStartE2EDuration="4.61428488s" podCreationTimestamp="2025-12-03 12:35:41 +0000 UTC" firstStartedPulling="2025-12-03 12:35:43.583807392 +0000 UTC m=+890.045655175" lastFinishedPulling="2025-12-03 12:35:45.146881752 +0000 UTC m=+891.608729535" observedRunningTime="2025-12-03 12:35:45.614192065 +0000 UTC m=+892.076039848" watchObservedRunningTime="2025-12-03 12:35:45.61428488 +0000 UTC m=+892.076132663" Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.721355 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.864529 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cq95c\" (UniqueName: \"kubernetes.io/projected/12045ba2-3133-4d9b-b80e-47f234107856-kube-api-access-cq95c\") pod \"12045ba2-3133-4d9b-b80e-47f234107856\" (UID: \"12045ba2-3133-4d9b-b80e-47f234107856\") " Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.864592 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12045ba2-3133-4d9b-b80e-47f234107856-utilities\") pod \"12045ba2-3133-4d9b-b80e-47f234107856\" (UID: \"12045ba2-3133-4d9b-b80e-47f234107856\") " Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.864618 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12045ba2-3133-4d9b-b80e-47f234107856-catalog-content\") pod \"12045ba2-3133-4d9b-b80e-47f234107856\" (UID: \"12045ba2-3133-4d9b-b80e-47f234107856\") " Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.865554 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12045ba2-3133-4d9b-b80e-47f234107856-utilities" (OuterVolumeSpecName: "utilities") pod "12045ba2-3133-4d9b-b80e-47f234107856" (UID: "12045ba2-3133-4d9b-b80e-47f234107856"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.869544 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12045ba2-3133-4d9b-b80e-47f234107856-kube-api-access-cq95c" (OuterVolumeSpecName: "kube-api-access-cq95c") pod "12045ba2-3133-4d9b-b80e-47f234107856" (UID: "12045ba2-3133-4d9b-b80e-47f234107856"). InnerVolumeSpecName "kube-api-access-cq95c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.878926 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12045ba2-3133-4d9b-b80e-47f234107856-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "12045ba2-3133-4d9b-b80e-47f234107856" (UID: "12045ba2-3133-4d9b-b80e-47f234107856"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.966624 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cq95c\" (UniqueName: \"kubernetes.io/projected/12045ba2-3133-4d9b-b80e-47f234107856-kube-api-access-cq95c\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.966667 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12045ba2-3133-4d9b-b80e-47f234107856-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:45 crc kubenswrapper[4849]: I1203 12:35:45.966677 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12045ba2-3133-4d9b-b80e-47f234107856-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:46 crc kubenswrapper[4849]: I1203 12:35:46.607703 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vkmfv" event={"ID":"12045ba2-3133-4d9b-b80e-47f234107856","Type":"ContainerDied","Data":"748284c49623178a9dade6782c8db44e19a8dd977b162f51c9a52305207e082c"} Dec 03 12:35:46 crc kubenswrapper[4849]: I1203 12:35:46.607936 4849 scope.go:117] "RemoveContainer" containerID="84fb43843886b6dd584e5bb346df01001a2ce3cf6f323f91b3143a967ab0e612" Dec 03 12:35:46 crc kubenswrapper[4849]: I1203 12:35:46.607722 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vkmfv" Dec 03 12:35:46 crc kubenswrapper[4849]: I1203 12:35:46.622913 4849 scope.go:117] "RemoveContainer" containerID="d1c94409c418d72863f5feca4e7d1a2521fe21334f7cfcc7793e72413eaec52c" Dec 03 12:35:46 crc kubenswrapper[4849]: I1203 12:35:46.631464 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vkmfv"] Dec 03 12:35:46 crc kubenswrapper[4849]: I1203 12:35:46.635850 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vkmfv"] Dec 03 12:35:46 crc kubenswrapper[4849]: I1203 12:35:46.650458 4849 scope.go:117] "RemoveContainer" containerID="f60118e775b6e606471c30973daa51031dad90801d5d5247cc5a0c3a5c455c02" Dec 03 12:35:47 crc kubenswrapper[4849]: I1203 12:35:47.867032 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12045ba2-3133-4d9b-b80e-47f234107856" path="/var/lib/kubelet/pods/12045ba2-3133-4d9b-b80e-47f234107856/volumes" Dec 03 12:35:49 crc kubenswrapper[4849]: I1203 12:35:49.487871 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:49 crc kubenswrapper[4849]: I1203 12:35:49.488235 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:49 crc kubenswrapper[4849]: I1203 12:35:49.527685 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:49 crc kubenswrapper[4849]: I1203 12:35:49.669297 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.201880 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb"] Dec 03 12:35:51 crc kubenswrapper[4849]: E1203 12:35:51.202308 4849 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12045ba2-3133-4d9b-b80e-47f234107856" containerName="extract-utilities" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.202321 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="12045ba2-3133-4d9b-b80e-47f234107856" containerName="extract-utilities" Dec 03 12:35:51 crc kubenswrapper[4849]: E1203 12:35:51.202359 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12045ba2-3133-4d9b-b80e-47f234107856" containerName="extract-content" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.202365 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="12045ba2-3133-4d9b-b80e-47f234107856" containerName="extract-content" Dec 03 12:35:51 crc kubenswrapper[4849]: E1203 12:35:51.202381 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12045ba2-3133-4d9b-b80e-47f234107856" containerName="registry-server" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.202387 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="12045ba2-3133-4d9b-b80e-47f234107856" containerName="registry-server" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.202534 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="12045ba2-3133-4d9b-b80e-47f234107856" containerName="registry-server" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.204172 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.206291 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.211576 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb"] Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.344366 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb\" (UID: \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.344469 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qvn4\" (UniqueName: \"kubernetes.io/projected/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-kube-api-access-2qvn4\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb\" (UID: \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.344496 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb\" (UID: \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.445774 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" 
(UniqueName: \"kubernetes.io/empty-dir/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb\" (UID: \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.445851 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qvn4\" (UniqueName: \"kubernetes.io/projected/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-kube-api-access-2qvn4\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb\" (UID: \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.445878 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb\" (UID: \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.446266 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb\" (UID: \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.446285 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb\" (UID: \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.461847 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qvn4\" (UniqueName: \"kubernetes.io/projected/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-kube-api-access-2qvn4\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb\" (UID: \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.523008 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.540420 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-5ccf89d679-frx2r" podUID="32181932-4430-45af-9b7f-4f1941d276c3" containerName="console" containerID="cri-o://6eeaee8f58937d53b01d9db8925980bde4a7bb0d03a26f9c2a2ce195536a1633" gracePeriod=15 Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.692895 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb"] Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.922149 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-5ccf89d679-frx2r_32181932-4430-45af-9b7f-4f1941d276c3/console/0.log" Dec 03 12:35:51 crc kubenswrapper[4849]: I1203 12:35:51.922361 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.054297 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-oauth-serving-cert\") pod \"32181932-4430-45af-9b7f-4f1941d276c3\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.054370 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/32181932-4430-45af-9b7f-4f1941d276c3-console-serving-cert\") pod \"32181932-4430-45af-9b7f-4f1941d276c3\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.054460 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-console-config\") pod \"32181932-4430-45af-9b7f-4f1941d276c3\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.054490 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-service-ca\") pod \"32181932-4430-45af-9b7f-4f1941d276c3\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.054547 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-trusted-ca-bundle\") pod \"32181932-4430-45af-9b7f-4f1941d276c3\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.054635 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8jzv\" (UniqueName: \"kubernetes.io/projected/32181932-4430-45af-9b7f-4f1941d276c3-kube-api-access-m8jzv\") pod \"32181932-4430-45af-9b7f-4f1941d276c3\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.054675 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/32181932-4430-45af-9b7f-4f1941d276c3-console-oauth-config\") pod 
\"32181932-4430-45af-9b7f-4f1941d276c3\" (UID: \"32181932-4430-45af-9b7f-4f1941d276c3\") " Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.054798 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "32181932-4430-45af-9b7f-4f1941d276c3" (UID: "32181932-4430-45af-9b7f-4f1941d276c3"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.055068 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-service-ca" (OuterVolumeSpecName: "service-ca") pod "32181932-4430-45af-9b7f-4f1941d276c3" (UID: "32181932-4430-45af-9b7f-4f1941d276c3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.055128 4849 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.055413 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "32181932-4430-45af-9b7f-4f1941d276c3" (UID: "32181932-4430-45af-9b7f-4f1941d276c3"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.055464 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-console-config" (OuterVolumeSpecName: "console-config") pod "32181932-4430-45af-9b7f-4f1941d276c3" (UID: "32181932-4430-45af-9b7f-4f1941d276c3"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.059400 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32181932-4430-45af-9b7f-4f1941d276c3-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "32181932-4430-45af-9b7f-4f1941d276c3" (UID: "32181932-4430-45af-9b7f-4f1941d276c3"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.059417 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32181932-4430-45af-9b7f-4f1941d276c3-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "32181932-4430-45af-9b7f-4f1941d276c3" (UID: "32181932-4430-45af-9b7f-4f1941d276c3"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.059498 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32181932-4430-45af-9b7f-4f1941d276c3-kube-api-access-m8jzv" (OuterVolumeSpecName: "kube-api-access-m8jzv") pod "32181932-4430-45af-9b7f-4f1941d276c3" (UID: "32181932-4430-45af-9b7f-4f1941d276c3"). InnerVolumeSpecName "kube-api-access-m8jzv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.113291 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.113330 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.142269 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.157550 4849 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.157577 4849 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.157588 4849 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/32181932-4430-45af-9b7f-4f1941d276c3-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.157597 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8jzv\" (UniqueName: \"kubernetes.io/projected/32181932-4430-45af-9b7f-4f1941d276c3-kube-api-access-m8jzv\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.157606 4849 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/32181932-4430-45af-9b7f-4f1941d276c3-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.157614 4849 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/32181932-4430-45af-9b7f-4f1941d276c3-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.644986 4849 generic.go:334] "Generic (PLEG): container finished" podID="1f0eee16-27ac-4f1d-a968-5424af3ee3d2" containerID="d6bae42a875ff4b4eab8e63f836ed0ff34b2bdcd07a6378a1fd57e823395577d" exitCode=0 Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.645055 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" event={"ID":"1f0eee16-27ac-4f1d-a968-5424af3ee3d2","Type":"ContainerDied","Data":"d6bae42a875ff4b4eab8e63f836ed0ff34b2bdcd07a6378a1fd57e823395577d"} Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.645227 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" event={"ID":"1f0eee16-27ac-4f1d-a968-5424af3ee3d2","Type":"ContainerStarted","Data":"9cd44988258183c745162f7d0642727e7f4f0719e7f6ed50c4eba8be956cf946"} Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.646542 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-5ccf89d679-frx2r_32181932-4430-45af-9b7f-4f1941d276c3/console/0.log" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.646597 4849 generic.go:334] 
"Generic (PLEG): container finished" podID="32181932-4430-45af-9b7f-4f1941d276c3" containerID="6eeaee8f58937d53b01d9db8925980bde4a7bb0d03a26f9c2a2ce195536a1633" exitCode=2 Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.646632 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5ccf89d679-frx2r" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.646674 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5ccf89d679-frx2r" event={"ID":"32181932-4430-45af-9b7f-4f1941d276c3","Type":"ContainerDied","Data":"6eeaee8f58937d53b01d9db8925980bde4a7bb0d03a26f9c2a2ce195536a1633"} Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.646703 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5ccf89d679-frx2r" event={"ID":"32181932-4430-45af-9b7f-4f1941d276c3","Type":"ContainerDied","Data":"ef6c2dd6124df71ac29f5d34c2c695ef47d1a754e11e6794d7c37ccfce3e12dd"} Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.646720 4849 scope.go:117] "RemoveContainer" containerID="6eeaee8f58937d53b01d9db8925980bde4a7bb0d03a26f9c2a2ce195536a1633" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.646771 4849 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.662936 4849 scope.go:117] "RemoveContainer" containerID="6eeaee8f58937d53b01d9db8925980bde4a7bb0d03a26f9c2a2ce195536a1633" Dec 03 12:35:52 crc kubenswrapper[4849]: E1203 12:35:52.663380 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6eeaee8f58937d53b01d9db8925980bde4a7bb0d03a26f9c2a2ce195536a1633\": container with ID starting with 6eeaee8f58937d53b01d9db8925980bde4a7bb0d03a26f9c2a2ce195536a1633 not found: ID does not exist" containerID="6eeaee8f58937d53b01d9db8925980bde4a7bb0d03a26f9c2a2ce195536a1633" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.663428 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6eeaee8f58937d53b01d9db8925980bde4a7bb0d03a26f9c2a2ce195536a1633"} err="failed to get container status \"6eeaee8f58937d53b01d9db8925980bde4a7bb0d03a26f9c2a2ce195536a1633\": rpc error: code = NotFound desc = could not find container \"6eeaee8f58937d53b01d9db8925980bde4a7bb0d03a26f9c2a2ce195536a1633\": container with ID starting with 6eeaee8f58937d53b01d9db8925980bde4a7bb0d03a26f9c2a2ce195536a1633 not found: ID does not exist" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.672061 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-5ccf89d679-frx2r"] Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.676510 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-5ccf89d679-frx2r"] Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.676926 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.676977 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:35:52 crc kubenswrapper[4849]: I1203 12:35:52.680178 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:35:53 crc kubenswrapper[4849]: I1203 12:35:53.870242 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32181932-4430-45af-9b7f-4f1941d276c3" path="/var/lib/kubelet/pods/32181932-4430-45af-9b7f-4f1941d276c3/volumes" Dec 03 12:35:54 crc kubenswrapper[4849]: I1203 12:35:54.661207 4849 generic.go:334] "Generic (PLEG): container finished" podID="1f0eee16-27ac-4f1d-a968-5424af3ee3d2" containerID="52875af932f37125ea47f3a8847889feabf43e1fa73f12549a984d0d5e6adba7" exitCode=0 Dec 03 12:35:54 crc kubenswrapper[4849]: I1203 12:35:54.661274 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" event={"ID":"1f0eee16-27ac-4f1d-a968-5424af3ee3d2","Type":"ContainerDied","Data":"52875af932f37125ea47f3a8847889feabf43e1fa73f12549a984d0d5e6adba7"} Dec 03 12:35:55 crc kubenswrapper[4849]: I1203 12:35:55.668405 4849 generic.go:334] "Generic (PLEG): container finished" podID="1f0eee16-27ac-4f1d-a968-5424af3ee3d2" containerID="0f559b17106424377a1681460137113866472537ece4cba97eb19246377069d2" exitCode=0 Dec 03 12:35:55 crc kubenswrapper[4849]: I1203 12:35:55.668458 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" event={"ID":"1f0eee16-27ac-4f1d-a968-5424af3ee3d2","Type":"ContainerDied","Data":"0f559b17106424377a1681460137113866472537ece4cba97eb19246377069d2"} Dec 03 12:35:56 crc kubenswrapper[4849]: I1203 12:35:56.565521 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qcvbt"] Dec 03 12:35:56 crc kubenswrapper[4849]: I1203 12:35:56.565908 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qcvbt" podUID="71cc5f0f-289e-4f3a-9632-f9d7124a62c4" containerName="registry-server" containerID="cri-o://6ab66cc8b58d9d2d7aebd77afe841ed21ba101ba90d72a1669325a3d546ced31" gracePeriod=2 Dec 03 12:35:56 crc kubenswrapper[4849]: I1203 12:35:56.899212 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" Dec 03 12:35:56 crc kubenswrapper[4849]: I1203 12:35:56.944178 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.021103 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-util\") pod \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\" (UID: \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\") " Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.021189 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qvn4\" (UniqueName: \"kubernetes.io/projected/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-kube-api-access-2qvn4\") pod \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\" (UID: \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\") " Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.021218 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-bundle\") pod \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\" (UID: \"1f0eee16-27ac-4f1d-a968-5424af3ee3d2\") " Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.022064 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-bundle" (OuterVolumeSpecName: "bundle") pod "1f0eee16-27ac-4f1d-a968-5424af3ee3d2" (UID: "1f0eee16-27ac-4f1d-a968-5424af3ee3d2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.026115 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-kube-api-access-2qvn4" (OuterVolumeSpecName: "kube-api-access-2qvn4") pod "1f0eee16-27ac-4f1d-a968-5424af3ee3d2" (UID: "1f0eee16-27ac-4f1d-a968-5424af3ee3d2"). InnerVolumeSpecName "kube-api-access-2qvn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.031008 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-util" (OuterVolumeSpecName: "util") pod "1f0eee16-27ac-4f1d-a968-5424af3ee3d2" (UID: "1f0eee16-27ac-4f1d-a968-5424af3ee3d2"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.122465 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-catalog-content\") pod \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\" (UID: \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\") " Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.122578 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nd62g\" (UniqueName: \"kubernetes.io/projected/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-kube-api-access-nd62g\") pod \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\" (UID: \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\") " Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.122667 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-utilities\") pod \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\" (UID: \"71cc5f0f-289e-4f3a-9632-f9d7124a62c4\") " Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.123085 4849 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-util\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.123103 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qvn4\" (UniqueName: \"kubernetes.io/projected/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-kube-api-access-2qvn4\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.123114 4849 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1f0eee16-27ac-4f1d-a968-5424af3ee3d2-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.123307 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-utilities" (OuterVolumeSpecName: "utilities") pod "71cc5f0f-289e-4f3a-9632-f9d7124a62c4" (UID: "71cc5f0f-289e-4f3a-9632-f9d7124a62c4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.124829 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-kube-api-access-nd62g" (OuterVolumeSpecName: "kube-api-access-nd62g") pod "71cc5f0f-289e-4f3a-9632-f9d7124a62c4" (UID: "71cc5f0f-289e-4f3a-9632-f9d7124a62c4"). InnerVolumeSpecName "kube-api-access-nd62g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.150985 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "71cc5f0f-289e-4f3a-9632-f9d7124a62c4" (UID: "71cc5f0f-289e-4f3a-9632-f9d7124a62c4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.225019 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.225050 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nd62g\" (UniqueName: \"kubernetes.io/projected/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-kube-api-access-nd62g\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.225063 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71cc5f0f-289e-4f3a-9632-f9d7124a62c4-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.694343 4849 generic.go:334] "Generic (PLEG): container finished" podID="71cc5f0f-289e-4f3a-9632-f9d7124a62c4" containerID="6ab66cc8b58d9d2d7aebd77afe841ed21ba101ba90d72a1669325a3d546ced31" exitCode=0 Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.694395 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qcvbt" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.694416 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qcvbt" event={"ID":"71cc5f0f-289e-4f3a-9632-f9d7124a62c4","Type":"ContainerDied","Data":"6ab66cc8b58d9d2d7aebd77afe841ed21ba101ba90d72a1669325a3d546ced31"} Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.694460 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qcvbt" event={"ID":"71cc5f0f-289e-4f3a-9632-f9d7124a62c4","Type":"ContainerDied","Data":"11309d4f694f2be6a1dc31a299faabb09f6c78aba2037533f0a61ccf7e848123"} Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.694478 4849 scope.go:117] "RemoveContainer" containerID="6ab66cc8b58d9d2d7aebd77afe841ed21ba101ba90d72a1669325a3d546ced31" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.697186 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" event={"ID":"1f0eee16-27ac-4f1d-a968-5424af3ee3d2","Type":"ContainerDied","Data":"9cd44988258183c745162f7d0642727e7f4f0719e7f6ed50c4eba8be956cf946"} Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.697303 4849 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9cd44988258183c745162f7d0642727e7f4f0719e7f6ed50c4eba8be956cf946" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.697233 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.715373 4849 scope.go:117] "RemoveContainer" containerID="055415ea35f76ee94d9640c3933cec46debf2dd526bc0b3c468a37c10e7c590c" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.728456 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qcvbt"] Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.732347 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qcvbt"] Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.750692 4849 scope.go:117] "RemoveContainer" containerID="2bc31ca748ec7af5fda225f2d8fbe1fa966fbd0b8afa61150cceb1d20b0e3c94" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.763176 4849 scope.go:117] "RemoveContainer" containerID="6ab66cc8b58d9d2d7aebd77afe841ed21ba101ba90d72a1669325a3d546ced31" Dec 03 12:35:57 crc kubenswrapper[4849]: E1203 12:35:57.763543 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ab66cc8b58d9d2d7aebd77afe841ed21ba101ba90d72a1669325a3d546ced31\": container with ID starting with 6ab66cc8b58d9d2d7aebd77afe841ed21ba101ba90d72a1669325a3d546ced31 not found: ID does not exist" containerID="6ab66cc8b58d9d2d7aebd77afe841ed21ba101ba90d72a1669325a3d546ced31" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.763597 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ab66cc8b58d9d2d7aebd77afe841ed21ba101ba90d72a1669325a3d546ced31"} err="failed to get container status \"6ab66cc8b58d9d2d7aebd77afe841ed21ba101ba90d72a1669325a3d546ced31\": rpc error: code = NotFound desc = could not find container \"6ab66cc8b58d9d2d7aebd77afe841ed21ba101ba90d72a1669325a3d546ced31\": container with ID starting with 6ab66cc8b58d9d2d7aebd77afe841ed21ba101ba90d72a1669325a3d546ced31 not found: ID does not exist" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.763617 4849 scope.go:117] "RemoveContainer" containerID="055415ea35f76ee94d9640c3933cec46debf2dd526bc0b3c468a37c10e7c590c" Dec 03 12:35:57 crc kubenswrapper[4849]: E1203 12:35:57.763922 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"055415ea35f76ee94d9640c3933cec46debf2dd526bc0b3c468a37c10e7c590c\": container with ID starting with 055415ea35f76ee94d9640c3933cec46debf2dd526bc0b3c468a37c10e7c590c not found: ID does not exist" containerID="055415ea35f76ee94d9640c3933cec46debf2dd526bc0b3c468a37c10e7c590c" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.763953 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"055415ea35f76ee94d9640c3933cec46debf2dd526bc0b3c468a37c10e7c590c"} err="failed to get container status \"055415ea35f76ee94d9640c3933cec46debf2dd526bc0b3c468a37c10e7c590c\": rpc error: code = NotFound desc = could not find container \"055415ea35f76ee94d9640c3933cec46debf2dd526bc0b3c468a37c10e7c590c\": container with ID starting with 055415ea35f76ee94d9640c3933cec46debf2dd526bc0b3c468a37c10e7c590c not found: ID does not exist" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.763973 4849 scope.go:117] "RemoveContainer" containerID="2bc31ca748ec7af5fda225f2d8fbe1fa966fbd0b8afa61150cceb1d20b0e3c94" Dec 03 12:35:57 crc kubenswrapper[4849]: E1203 12:35:57.764233 4849 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bc31ca748ec7af5fda225f2d8fbe1fa966fbd0b8afa61150cceb1d20b0e3c94\": container with ID starting with 2bc31ca748ec7af5fda225f2d8fbe1fa966fbd0b8afa61150cceb1d20b0e3c94 not found: ID does not exist" containerID="2bc31ca748ec7af5fda225f2d8fbe1fa966fbd0b8afa61150cceb1d20b0e3c94" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.764260 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bc31ca748ec7af5fda225f2d8fbe1fa966fbd0b8afa61150cceb1d20b0e3c94"} err="failed to get container status \"2bc31ca748ec7af5fda225f2d8fbe1fa966fbd0b8afa61150cceb1d20b0e3c94\": rpc error: code = NotFound desc = could not find container \"2bc31ca748ec7af5fda225f2d8fbe1fa966fbd0b8afa61150cceb1d20b0e3c94\": container with ID starting with 2bc31ca748ec7af5fda225f2d8fbe1fa966fbd0b8afa61150cceb1d20b0e3c94 not found: ID does not exist" Dec 03 12:35:57 crc kubenswrapper[4849]: I1203 12:35:57.863025 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71cc5f0f-289e-4f3a-9632-f9d7124a62c4" path="/var/lib/kubelet/pods/71cc5f0f-289e-4f3a-9632-f9d7124a62c4/volumes" Dec 03 12:36:00 crc kubenswrapper[4849]: I1203 12:36:00.965392 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n8fwv"] Dec 03 12:36:00 crc kubenswrapper[4849]: I1203 12:36:00.966607 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n8fwv" podUID="72cc2472-992b-46c7-aee8-a64202fd596a" containerName="registry-server" containerID="cri-o://473eb0181f8055c56a8322ec5275cf412f1dba7d8879e72345993c7e9fde68ea" gracePeriod=2 Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.298058 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.482333 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7m5q\" (UniqueName: \"kubernetes.io/projected/72cc2472-992b-46c7-aee8-a64202fd596a-kube-api-access-c7m5q\") pod \"72cc2472-992b-46c7-aee8-a64202fd596a\" (UID: \"72cc2472-992b-46c7-aee8-a64202fd596a\") " Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.482379 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72cc2472-992b-46c7-aee8-a64202fd596a-utilities\") pod \"72cc2472-992b-46c7-aee8-a64202fd596a\" (UID: \"72cc2472-992b-46c7-aee8-a64202fd596a\") " Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.482413 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72cc2472-992b-46c7-aee8-a64202fd596a-catalog-content\") pod \"72cc2472-992b-46c7-aee8-a64202fd596a\" (UID: \"72cc2472-992b-46c7-aee8-a64202fd596a\") " Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.483150 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72cc2472-992b-46c7-aee8-a64202fd596a-utilities" (OuterVolumeSpecName: "utilities") pod "72cc2472-992b-46c7-aee8-a64202fd596a" (UID: "72cc2472-992b-46c7-aee8-a64202fd596a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.489911 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72cc2472-992b-46c7-aee8-a64202fd596a-kube-api-access-c7m5q" (OuterVolumeSpecName: "kube-api-access-c7m5q") pod "72cc2472-992b-46c7-aee8-a64202fd596a" (UID: "72cc2472-992b-46c7-aee8-a64202fd596a"). InnerVolumeSpecName "kube-api-access-c7m5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.546584 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72cc2472-992b-46c7-aee8-a64202fd596a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "72cc2472-992b-46c7-aee8-a64202fd596a" (UID: "72cc2472-992b-46c7-aee8-a64202fd596a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.583946 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7m5q\" (UniqueName: \"kubernetes.io/projected/72cc2472-992b-46c7-aee8-a64202fd596a-kube-api-access-c7m5q\") on node \"crc\" DevicePath \"\"" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.583975 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72cc2472-992b-46c7-aee8-a64202fd596a-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.583985 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72cc2472-992b-46c7-aee8-a64202fd596a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.720719 4849 generic.go:334] "Generic (PLEG): container finished" podID="72cc2472-992b-46c7-aee8-a64202fd596a" containerID="473eb0181f8055c56a8322ec5275cf412f1dba7d8879e72345993c7e9fde68ea" exitCode=0 Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.720757 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8fwv" event={"ID":"72cc2472-992b-46c7-aee8-a64202fd596a","Type":"ContainerDied","Data":"473eb0181f8055c56a8322ec5275cf412f1dba7d8879e72345993c7e9fde68ea"} Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.720805 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n8fwv" event={"ID":"72cc2472-992b-46c7-aee8-a64202fd596a","Type":"ContainerDied","Data":"3accb7c5776137851543dd0580dcecfc1401a6719a893754f79796c9433a9863"} Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.720819 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n8fwv" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.720823 4849 scope.go:117] "RemoveContainer" containerID="473eb0181f8055c56a8322ec5275cf412f1dba7d8879e72345993c7e9fde68ea" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.734758 4849 scope.go:117] "RemoveContainer" containerID="fdaa73608249b722de296406f724214cc7df11996241f1a5b0555c80e0fa31c8" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.748740 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n8fwv"] Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.753338 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n8fwv"] Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.766291 4849 scope.go:117] "RemoveContainer" containerID="4fb36a5dcb1db2d25a4c86ad42b60415fe5c53932c55c37738e97ae22d2647e5" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.780294 4849 scope.go:117] "RemoveContainer" containerID="473eb0181f8055c56a8322ec5275cf412f1dba7d8879e72345993c7e9fde68ea" Dec 03 12:36:01 crc kubenswrapper[4849]: E1203 12:36:01.780771 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"473eb0181f8055c56a8322ec5275cf412f1dba7d8879e72345993c7e9fde68ea\": container with ID starting with 473eb0181f8055c56a8322ec5275cf412f1dba7d8879e72345993c7e9fde68ea not found: ID does not exist" containerID="473eb0181f8055c56a8322ec5275cf412f1dba7d8879e72345993c7e9fde68ea" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.780872 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"473eb0181f8055c56a8322ec5275cf412f1dba7d8879e72345993c7e9fde68ea"} err="failed to get container status \"473eb0181f8055c56a8322ec5275cf412f1dba7d8879e72345993c7e9fde68ea\": rpc error: code = NotFound desc = could not find container \"473eb0181f8055c56a8322ec5275cf412f1dba7d8879e72345993c7e9fde68ea\": container with ID starting with 473eb0181f8055c56a8322ec5275cf412f1dba7d8879e72345993c7e9fde68ea not found: ID does not exist" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.780949 4849 scope.go:117] "RemoveContainer" containerID="fdaa73608249b722de296406f724214cc7df11996241f1a5b0555c80e0fa31c8" Dec 03 12:36:01 crc kubenswrapper[4849]: E1203 12:36:01.781209 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdaa73608249b722de296406f724214cc7df11996241f1a5b0555c80e0fa31c8\": container with ID starting with fdaa73608249b722de296406f724214cc7df11996241f1a5b0555c80e0fa31c8 not found: ID does not exist" containerID="fdaa73608249b722de296406f724214cc7df11996241f1a5b0555c80e0fa31c8" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.781231 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdaa73608249b722de296406f724214cc7df11996241f1a5b0555c80e0fa31c8"} err="failed to get container status \"fdaa73608249b722de296406f724214cc7df11996241f1a5b0555c80e0fa31c8\": rpc error: code = NotFound desc = could not find container \"fdaa73608249b722de296406f724214cc7df11996241f1a5b0555c80e0fa31c8\": container with ID starting with fdaa73608249b722de296406f724214cc7df11996241f1a5b0555c80e0fa31c8 not found: ID does not exist" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.781246 4849 scope.go:117] "RemoveContainer" 
containerID="4fb36a5dcb1db2d25a4c86ad42b60415fe5c53932c55c37738e97ae22d2647e5" Dec 03 12:36:01 crc kubenswrapper[4849]: E1203 12:36:01.781412 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fb36a5dcb1db2d25a4c86ad42b60415fe5c53932c55c37738e97ae22d2647e5\": container with ID starting with 4fb36a5dcb1db2d25a4c86ad42b60415fe5c53932c55c37738e97ae22d2647e5 not found: ID does not exist" containerID="4fb36a5dcb1db2d25a4c86ad42b60415fe5c53932c55c37738e97ae22d2647e5" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.781434 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fb36a5dcb1db2d25a4c86ad42b60415fe5c53932c55c37738e97ae22d2647e5"} err="failed to get container status \"4fb36a5dcb1db2d25a4c86ad42b60415fe5c53932c55c37738e97ae22d2647e5\": rpc error: code = NotFound desc = could not find container \"4fb36a5dcb1db2d25a4c86ad42b60415fe5c53932c55c37738e97ae22d2647e5\": container with ID starting with 4fb36a5dcb1db2d25a4c86ad42b60415fe5c53932c55c37738e97ae22d2647e5 not found: ID does not exist" Dec 03 12:36:01 crc kubenswrapper[4849]: I1203 12:36:01.862430 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72cc2472-992b-46c7-aee8-a64202fd596a" path="/var/lib/kubelet/pods/72cc2472-992b-46c7-aee8-a64202fd596a/volumes" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.120479 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4"] Dec 03 12:36:08 crc kubenswrapper[4849]: E1203 12:36:08.120904 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f0eee16-27ac-4f1d-a968-5424af3ee3d2" containerName="pull" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.120917 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f0eee16-27ac-4f1d-a968-5424af3ee3d2" containerName="pull" Dec 03 12:36:08 crc kubenswrapper[4849]: E1203 12:36:08.120925 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f0eee16-27ac-4f1d-a968-5424af3ee3d2" containerName="extract" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.120930 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f0eee16-27ac-4f1d-a968-5424af3ee3d2" containerName="extract" Dec 03 12:36:08 crc kubenswrapper[4849]: E1203 12:36:08.120943 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71cc5f0f-289e-4f3a-9632-f9d7124a62c4" containerName="extract-utilities" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.120949 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="71cc5f0f-289e-4f3a-9632-f9d7124a62c4" containerName="extract-utilities" Dec 03 12:36:08 crc kubenswrapper[4849]: E1203 12:36:08.120962 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72cc2472-992b-46c7-aee8-a64202fd596a" containerName="registry-server" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.120968 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="72cc2472-992b-46c7-aee8-a64202fd596a" containerName="registry-server" Dec 03 12:36:08 crc kubenswrapper[4849]: E1203 12:36:08.120983 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72cc2472-992b-46c7-aee8-a64202fd596a" containerName="extract-content" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.120988 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="72cc2472-992b-46c7-aee8-a64202fd596a" containerName="extract-content" Dec 03 12:36:08 crc 
kubenswrapper[4849]: E1203 12:36:08.120997 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f0eee16-27ac-4f1d-a968-5424af3ee3d2" containerName="util" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.121002 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f0eee16-27ac-4f1d-a968-5424af3ee3d2" containerName="util" Dec 03 12:36:08 crc kubenswrapper[4849]: E1203 12:36:08.121024 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71cc5f0f-289e-4f3a-9632-f9d7124a62c4" containerName="extract-content" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.121028 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="71cc5f0f-289e-4f3a-9632-f9d7124a62c4" containerName="extract-content" Dec 03 12:36:08 crc kubenswrapper[4849]: E1203 12:36:08.121041 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32181932-4430-45af-9b7f-4f1941d276c3" containerName="console" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.121046 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="32181932-4430-45af-9b7f-4f1941d276c3" containerName="console" Dec 03 12:36:08 crc kubenswrapper[4849]: E1203 12:36:08.121056 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72cc2472-992b-46c7-aee8-a64202fd596a" containerName="extract-utilities" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.121061 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="72cc2472-992b-46c7-aee8-a64202fd596a" containerName="extract-utilities" Dec 03 12:36:08 crc kubenswrapper[4849]: E1203 12:36:08.121068 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71cc5f0f-289e-4f3a-9632-f9d7124a62c4" containerName="registry-server" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.121073 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="71cc5f0f-289e-4f3a-9632-f9d7124a62c4" containerName="registry-server" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.121209 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="72cc2472-992b-46c7-aee8-a64202fd596a" containerName="registry-server" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.121221 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="71cc5f0f-289e-4f3a-9632-f9d7124a62c4" containerName="registry-server" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.121230 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f0eee16-27ac-4f1d-a968-5424af3ee3d2" containerName="extract" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.121239 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="32181932-4430-45af-9b7f-4f1941d276c3" containerName="console" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.121671 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.123151 4849 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.123569 4849 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-2v5l4" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.123577 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.123851 4849 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.123875 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.137390 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4"] Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.188206 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9b7b5461-2704-44b1-abce-14ddfebec290-apiservice-cert\") pod \"metallb-operator-controller-manager-7fcb955f57-kpvw4\" (UID: \"9b7b5461-2704-44b1-abce-14ddfebec290\") " pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.188267 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9b7b5461-2704-44b1-abce-14ddfebec290-webhook-cert\") pod \"metallb-operator-controller-manager-7fcb955f57-kpvw4\" (UID: \"9b7b5461-2704-44b1-abce-14ddfebec290\") " pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.188373 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkklc\" (UniqueName: \"kubernetes.io/projected/9b7b5461-2704-44b1-abce-14ddfebec290-kube-api-access-zkklc\") pod \"metallb-operator-controller-manager-7fcb955f57-kpvw4\" (UID: \"9b7b5461-2704-44b1-abce-14ddfebec290\") " pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.289273 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9b7b5461-2704-44b1-abce-14ddfebec290-apiservice-cert\") pod \"metallb-operator-controller-manager-7fcb955f57-kpvw4\" (UID: \"9b7b5461-2704-44b1-abce-14ddfebec290\") " pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.289322 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9b7b5461-2704-44b1-abce-14ddfebec290-webhook-cert\") pod \"metallb-operator-controller-manager-7fcb955f57-kpvw4\" (UID: \"9b7b5461-2704-44b1-abce-14ddfebec290\") " pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.289384 4849 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkklc\" (UniqueName: \"kubernetes.io/projected/9b7b5461-2704-44b1-abce-14ddfebec290-kube-api-access-zkklc\") pod \"metallb-operator-controller-manager-7fcb955f57-kpvw4\" (UID: \"9b7b5461-2704-44b1-abce-14ddfebec290\") " pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.294310 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9b7b5461-2704-44b1-abce-14ddfebec290-webhook-cert\") pod \"metallb-operator-controller-manager-7fcb955f57-kpvw4\" (UID: \"9b7b5461-2704-44b1-abce-14ddfebec290\") " pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.302143 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9b7b5461-2704-44b1-abce-14ddfebec290-apiservice-cert\") pod \"metallb-operator-controller-manager-7fcb955f57-kpvw4\" (UID: \"9b7b5461-2704-44b1-abce-14ddfebec290\") " pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.318703 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkklc\" (UniqueName: \"kubernetes.io/projected/9b7b5461-2704-44b1-abce-14ddfebec290-kube-api-access-zkklc\") pod \"metallb-operator-controller-manager-7fcb955f57-kpvw4\" (UID: \"9b7b5461-2704-44b1-abce-14ddfebec290\") " pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.377006 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth"] Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.378099 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.380554 4849 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.380575 4849 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.380955 4849 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-nsk28" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.391159 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64zt7\" (UniqueName: \"kubernetes.io/projected/008d5799-334c-4ee1-af23-c7d44925cd90-kube-api-access-64zt7\") pod \"metallb-operator-webhook-server-77f6f7b67b-d7sth\" (UID: \"008d5799-334c-4ee1-af23-c7d44925cd90\") " pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.391194 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/008d5799-334c-4ee1-af23-c7d44925cd90-webhook-cert\") pod \"metallb-operator-webhook-server-77f6f7b67b-d7sth\" (UID: \"008d5799-334c-4ee1-af23-c7d44925cd90\") " pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.391250 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/008d5799-334c-4ee1-af23-c7d44925cd90-apiservice-cert\") pod \"metallb-operator-webhook-server-77f6f7b67b-d7sth\" (UID: \"008d5799-334c-4ee1-af23-c7d44925cd90\") " pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.399715 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth"] Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.435237 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.491823 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64zt7\" (UniqueName: \"kubernetes.io/projected/008d5799-334c-4ee1-af23-c7d44925cd90-kube-api-access-64zt7\") pod \"metallb-operator-webhook-server-77f6f7b67b-d7sth\" (UID: \"008d5799-334c-4ee1-af23-c7d44925cd90\") " pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.492120 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/008d5799-334c-4ee1-af23-c7d44925cd90-webhook-cert\") pod \"metallb-operator-webhook-server-77f6f7b67b-d7sth\" (UID: \"008d5799-334c-4ee1-af23-c7d44925cd90\") " pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.492580 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/008d5799-334c-4ee1-af23-c7d44925cd90-apiservice-cert\") pod \"metallb-operator-webhook-server-77f6f7b67b-d7sth\" (UID: \"008d5799-334c-4ee1-af23-c7d44925cd90\") " pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.494784 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/008d5799-334c-4ee1-af23-c7d44925cd90-webhook-cert\") pod \"metallb-operator-webhook-server-77f6f7b67b-d7sth\" (UID: \"008d5799-334c-4ee1-af23-c7d44925cd90\") " pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.501008 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/008d5799-334c-4ee1-af23-c7d44925cd90-apiservice-cert\") pod \"metallb-operator-webhook-server-77f6f7b67b-d7sth\" (UID: \"008d5799-334c-4ee1-af23-c7d44925cd90\") " pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.514463 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64zt7\" (UniqueName: \"kubernetes.io/projected/008d5799-334c-4ee1-af23-c7d44925cd90-kube-api-access-64zt7\") pod \"metallb-operator-webhook-server-77f6f7b67b-d7sth\" (UID: \"008d5799-334c-4ee1-af23-c7d44925cd90\") " pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.691282 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" Dec 03 12:36:08 crc kubenswrapper[4849]: I1203 12:36:08.813274 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4"] Dec 03 12:36:08 crc kubenswrapper[4849]: W1203 12:36:08.819070 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b7b5461_2704_44b1_abce_14ddfebec290.slice/crio-ff4107f5ece46e0ecbbab8fa3453c1b14e32fdfd1e71c7d1a421fb1a340901bf WatchSource:0}: Error finding container ff4107f5ece46e0ecbbab8fa3453c1b14e32fdfd1e71c7d1a421fb1a340901bf: Status 404 returned error can't find the container with id ff4107f5ece46e0ecbbab8fa3453c1b14e32fdfd1e71c7d1a421fb1a340901bf Dec 03 12:36:09 crc kubenswrapper[4849]: I1203 12:36:09.073259 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth"] Dec 03 12:36:09 crc kubenswrapper[4849]: W1203 12:36:09.076072 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod008d5799_334c_4ee1_af23_c7d44925cd90.slice/crio-404d3c3dfaac9aa10784bbe2130e195555fe7fdf35433b90ec977af844e849eb WatchSource:0}: Error finding container 404d3c3dfaac9aa10784bbe2130e195555fe7fdf35433b90ec977af844e849eb: Status 404 returned error can't find the container with id 404d3c3dfaac9aa10784bbe2130e195555fe7fdf35433b90ec977af844e849eb Dec 03 12:36:09 crc kubenswrapper[4849]: I1203 12:36:09.786845 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" event={"ID":"008d5799-334c-4ee1-af23-c7d44925cd90","Type":"ContainerStarted","Data":"404d3c3dfaac9aa10784bbe2130e195555fe7fdf35433b90ec977af844e849eb"} Dec 03 12:36:09 crc kubenswrapper[4849]: I1203 12:36:09.787944 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" event={"ID":"9b7b5461-2704-44b1-abce-14ddfebec290","Type":"ContainerStarted","Data":"ff4107f5ece46e0ecbbab8fa3453c1b14e32fdfd1e71c7d1a421fb1a340901bf"} Dec 03 12:36:13 crc kubenswrapper[4849]: I1203 12:36:13.817547 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" event={"ID":"9b7b5461-2704-44b1-abce-14ddfebec290","Type":"ContainerStarted","Data":"ce624331acd4a96b495949767e325636bf9e0ee4d63baaed8bcf8eb3acf00791"} Dec 03 12:36:13 crc kubenswrapper[4849]: I1203 12:36:13.817942 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" Dec 03 12:36:13 crc kubenswrapper[4849]: I1203 12:36:13.818954 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" event={"ID":"008d5799-334c-4ee1-af23-c7d44925cd90","Type":"ContainerStarted","Data":"6a839daf6b968ee6d97cea9bfd1584522d106fa34ca963b5c654036d70f9d154"} Dec 03 12:36:13 crc kubenswrapper[4849]: I1203 12:36:13.819097 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" Dec 03 12:36:13 crc kubenswrapper[4849]: I1203 12:36:13.833938 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" 
podStartSLOduration=1.5531396549999998 podStartE2EDuration="5.833925847s" podCreationTimestamp="2025-12-03 12:36:08 +0000 UTC" firstStartedPulling="2025-12-03 12:36:08.822724369 +0000 UTC m=+915.284572152" lastFinishedPulling="2025-12-03 12:36:13.103510561 +0000 UTC m=+919.565358344" observedRunningTime="2025-12-03 12:36:13.832421469 +0000 UTC m=+920.294269252" watchObservedRunningTime="2025-12-03 12:36:13.833925847 +0000 UTC m=+920.295773620" Dec 03 12:36:13 crc kubenswrapper[4849]: I1203 12:36:13.853406 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" podStartSLOduration=1.8119862690000001 podStartE2EDuration="5.853391505s" podCreationTimestamp="2025-12-03 12:36:08 +0000 UTC" firstStartedPulling="2025-12-03 12:36:09.077397305 +0000 UTC m=+915.539245088" lastFinishedPulling="2025-12-03 12:36:13.118802541 +0000 UTC m=+919.580650324" observedRunningTime="2025-12-03 12:36:13.849844316 +0000 UTC m=+920.311692149" watchObservedRunningTime="2025-12-03 12:36:13.853391505 +0000 UTC m=+920.315239288" Dec 03 12:36:22 crc kubenswrapper[4849]: I1203 12:36:22.677554 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:36:22 crc kubenswrapper[4849]: I1203 12:36:22.677961 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:36:22 crc kubenswrapper[4849]: I1203 12:36:22.678001 4849 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:36:22 crc kubenswrapper[4849]: I1203 12:36:22.678485 4849 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a625210ce9ae49de7cb766c5c6d666ea81b94e9a45eb02fc64652a282b61fdfe"} pod="openshift-machine-config-operator/machine-config-daemon-hszbg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:36:22 crc kubenswrapper[4849]: I1203 12:36:22.678536 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" containerID="cri-o://a625210ce9ae49de7cb766c5c6d666ea81b94e9a45eb02fc64652a282b61fdfe" gracePeriod=600 Dec 03 12:36:22 crc kubenswrapper[4849]: I1203 12:36:22.872726 4849 generic.go:334] "Generic (PLEG): container finished" podID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerID="a625210ce9ae49de7cb766c5c6d666ea81b94e9a45eb02fc64652a282b61fdfe" exitCode=0 Dec 03 12:36:22 crc kubenswrapper[4849]: I1203 12:36:22.872765 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerDied","Data":"a625210ce9ae49de7cb766c5c6d666ea81b94e9a45eb02fc64652a282b61fdfe"} Dec 03 12:36:22 crc kubenswrapper[4849]: I1203 12:36:22.872801 4849 
scope.go:117] "RemoveContainer" containerID="2ef7a020e9553af217991c2be82bd8c7cbc4859d782a4f813dbd020c01097d67" Dec 03 12:36:23 crc kubenswrapper[4849]: I1203 12:36:23.879862 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerStarted","Data":"84d48755608b0a99063f1f59d8a87643257a6aa58a070bcd6b4cc2f67b6ed2ec"} Dec 03 12:36:28 crc kubenswrapper[4849]: I1203 12:36:28.696778 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-77f6f7b67b-d7sth" Dec 03 12:36:48 crc kubenswrapper[4849]: I1203 12:36:48.437716 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7fcb955f57-kpvw4" Dec 03 12:36:48 crc kubenswrapper[4849]: I1203 12:36:48.938997 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-kgj8v"] Dec 03 12:36:48 crc kubenswrapper[4849]: I1203 12:36:48.941432 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:48 crc kubenswrapper[4849]: I1203 12:36:48.945000 4849 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 03 12:36:48 crc kubenswrapper[4849]: I1203 12:36:48.945077 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 03 12:36:48 crc kubenswrapper[4849]: I1203 12:36:48.945371 4849 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-zt454" Dec 03 12:36:48 crc kubenswrapper[4849]: I1203 12:36:48.951185 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng"] Dec 03 12:36:48 crc kubenswrapper[4849]: I1203 12:36:48.951991 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" Dec 03 12:36:48 crc kubenswrapper[4849]: I1203 12:36:48.953665 4849 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 03 12:36:48 crc kubenswrapper[4849]: I1203 12:36:48.965192 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng"] Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.013010 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/c6d0dda9-8382-472b-903b-8664e57f3fc5-metrics\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.013108 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/c6d0dda9-8382-472b-903b-8664e57f3fc5-frr-sockets\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.013147 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c6d0dda9-8382-472b-903b-8664e57f3fc5-metrics-certs\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.013166 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/c6d0dda9-8382-472b-903b-8664e57f3fc5-frr-startup\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.013190 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/c6d0dda9-8382-472b-903b-8664e57f3fc5-reloader\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.016448 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/c6d0dda9-8382-472b-903b-8664e57f3fc5-frr-conf\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.016544 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6b66905-7716-42b4-94a6-4de28876a7d6-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-6w6ng\" (UID: \"b6b66905-7716-42b4-94a6-4de28876a7d6\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.016636 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4hmn\" (UniqueName: \"kubernetes.io/projected/b6b66905-7716-42b4-94a6-4de28876a7d6-kube-api-access-h4hmn\") pod \"frr-k8s-webhook-server-7fcb986d4-6w6ng\" (UID: \"b6b66905-7716-42b4-94a6-4de28876a7d6\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" Dec 03 
12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.016685 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmxxl\" (UniqueName: \"kubernetes.io/projected/c6d0dda9-8382-472b-903b-8664e57f3fc5-kube-api-access-zmxxl\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.018377 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-dchnp"] Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.022988 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-dchnp" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.024746 4849 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.024941 4849 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.025118 4849 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-bbz97" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.025259 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.027828 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-fw2c2"] Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.028970 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-fw2c2" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.030685 4849 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.044902 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-fw2c2"] Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118137 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/c6d0dda9-8382-472b-903b-8664e57f3fc5-metrics\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118289 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjmz2\" (UniqueName: \"kubernetes.io/projected/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-kube-api-access-rjmz2\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " pod="metallb-system/speaker-dchnp" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118325 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/c6d0dda9-8382-472b-903b-8664e57f3fc5-frr-sockets\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118365 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c6d0dda9-8382-472b-903b-8664e57f3fc5-metrics-certs\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 
03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118397 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/c6d0dda9-8382-472b-903b-8664e57f3fc5-frr-startup\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118438 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/c6d0dda9-8382-472b-903b-8664e57f3fc5-reloader\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118522 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/069cef18-23c1-4c6f-b0e0-7fcab99a1d52-metrics-certs\") pod \"controller-f8648f98b-fw2c2\" (UID: \"069cef18-23c1-4c6f-b0e0-7fcab99a1d52\") " pod="metallb-system/controller-f8648f98b-fw2c2" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118567 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-memberlist\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " pod="metallb-system/speaker-dchnp" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118606 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-metallb-excludel2\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " pod="metallb-system/speaker-dchnp" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118633 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/c6d0dda9-8382-472b-903b-8664e57f3fc5-frr-conf\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118730 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-metrics-certs\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " pod="metallb-system/speaker-dchnp" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118772 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6b66905-7716-42b4-94a6-4de28876a7d6-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-6w6ng\" (UID: \"b6b66905-7716-42b4-94a6-4de28876a7d6\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118821 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/069cef18-23c1-4c6f-b0e0-7fcab99a1d52-cert\") pod \"controller-f8648f98b-fw2c2\" (UID: \"069cef18-23c1-4c6f-b0e0-7fcab99a1d52\") " pod="metallb-system/controller-f8648f98b-fw2c2" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118861 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-qtg7j\" (UniqueName: \"kubernetes.io/projected/069cef18-23c1-4c6f-b0e0-7fcab99a1d52-kube-api-access-qtg7j\") pod \"controller-f8648f98b-fw2c2\" (UID: \"069cef18-23c1-4c6f-b0e0-7fcab99a1d52\") " pod="metallb-system/controller-f8648f98b-fw2c2" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118886 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4hmn\" (UniqueName: \"kubernetes.io/projected/b6b66905-7716-42b4-94a6-4de28876a7d6-kube-api-access-h4hmn\") pod \"frr-k8s-webhook-server-7fcb986d4-6w6ng\" (UID: \"b6b66905-7716-42b4-94a6-4de28876a7d6\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118917 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmxxl\" (UniqueName: \"kubernetes.io/projected/c6d0dda9-8382-472b-903b-8664e57f3fc5-kube-api-access-zmxxl\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.118972 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/c6d0dda9-8382-472b-903b-8664e57f3fc5-reloader\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.119012 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/c6d0dda9-8382-472b-903b-8664e57f3fc5-frr-conf\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.119087 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/c6d0dda9-8382-472b-903b-8664e57f3fc5-metrics\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: E1203 12:36:49.119169 4849 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 03 12:36:49 crc kubenswrapper[4849]: E1203 12:36:49.119224 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b6b66905-7716-42b4-94a6-4de28876a7d6-cert podName:b6b66905-7716-42b4-94a6-4de28876a7d6 nodeName:}" failed. No retries permitted until 2025-12-03 12:36:49.619209167 +0000 UTC m=+956.081056939 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b6b66905-7716-42b4-94a6-4de28876a7d6-cert") pod "frr-k8s-webhook-server-7fcb986d4-6w6ng" (UID: "b6b66905-7716-42b4-94a6-4de28876a7d6") : secret "frr-k8s-webhook-server-cert" not found Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.119333 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/c6d0dda9-8382-472b-903b-8664e57f3fc5-frr-startup\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.119434 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/c6d0dda9-8382-472b-903b-8664e57f3fc5-frr-sockets\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.123351 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c6d0dda9-8382-472b-903b-8664e57f3fc5-metrics-certs\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.133782 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmxxl\" (UniqueName: \"kubernetes.io/projected/c6d0dda9-8382-472b-903b-8664e57f3fc5-kube-api-access-zmxxl\") pod \"frr-k8s-kgj8v\" (UID: \"c6d0dda9-8382-472b-903b-8664e57f3fc5\") " pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.149729 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4hmn\" (UniqueName: \"kubernetes.io/projected/b6b66905-7716-42b4-94a6-4de28876a7d6-kube-api-access-h4hmn\") pod \"frr-k8s-webhook-server-7fcb986d4-6w6ng\" (UID: \"b6b66905-7716-42b4-94a6-4de28876a7d6\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.220393 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/069cef18-23c1-4c6f-b0e0-7fcab99a1d52-metrics-certs\") pod \"controller-f8648f98b-fw2c2\" (UID: \"069cef18-23c1-4c6f-b0e0-7fcab99a1d52\") " pod="metallb-system/controller-f8648f98b-fw2c2" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.220445 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-memberlist\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " pod="metallb-system/speaker-dchnp" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.220477 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-metallb-excludel2\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " pod="metallb-system/speaker-dchnp" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.220517 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-metrics-certs\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " 
pod="metallb-system/speaker-dchnp" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.220567 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/069cef18-23c1-4c6f-b0e0-7fcab99a1d52-cert\") pod \"controller-f8648f98b-fw2c2\" (UID: \"069cef18-23c1-4c6f-b0e0-7fcab99a1d52\") " pod="metallb-system/controller-f8648f98b-fw2c2" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.220597 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtg7j\" (UniqueName: \"kubernetes.io/projected/069cef18-23c1-4c6f-b0e0-7fcab99a1d52-kube-api-access-qtg7j\") pod \"controller-f8648f98b-fw2c2\" (UID: \"069cef18-23c1-4c6f-b0e0-7fcab99a1d52\") " pod="metallb-system/controller-f8648f98b-fw2c2" Dec 03 12:36:49 crc kubenswrapper[4849]: E1203 12:36:49.220613 4849 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.220671 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjmz2\" (UniqueName: \"kubernetes.io/projected/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-kube-api-access-rjmz2\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " pod="metallb-system/speaker-dchnp" Dec 03 12:36:49 crc kubenswrapper[4849]: E1203 12:36:49.220697 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-memberlist podName:84e9fe4d-6c68-464b-83f4-ba0889ae9b73 nodeName:}" failed. No retries permitted until 2025-12-03 12:36:49.720681016 +0000 UTC m=+956.182528799 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-memberlist") pod "speaker-dchnp" (UID: "84e9fe4d-6c68-464b-83f4-ba0889ae9b73") : secret "metallb-memberlist" not found Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.221199 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-metallb-excludel2\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " pod="metallb-system/speaker-dchnp" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.223486 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/069cef18-23c1-4c6f-b0e0-7fcab99a1d52-cert\") pod \"controller-f8648f98b-fw2c2\" (UID: \"069cef18-23c1-4c6f-b0e0-7fcab99a1d52\") " pod="metallb-system/controller-f8648f98b-fw2c2" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.223844 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-metrics-certs\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " pod="metallb-system/speaker-dchnp" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.227158 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/069cef18-23c1-4c6f-b0e0-7fcab99a1d52-metrics-certs\") pod \"controller-f8648f98b-fw2c2\" (UID: \"069cef18-23c1-4c6f-b0e0-7fcab99a1d52\") " pod="metallb-system/controller-f8648f98b-fw2c2" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.235865 4849 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-rjmz2\" (UniqueName: \"kubernetes.io/projected/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-kube-api-access-rjmz2\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " pod="metallb-system/speaker-dchnp" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.237228 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtg7j\" (UniqueName: \"kubernetes.io/projected/069cef18-23c1-4c6f-b0e0-7fcab99a1d52-kube-api-access-qtg7j\") pod \"controller-f8648f98b-fw2c2\" (UID: \"069cef18-23c1-4c6f-b0e0-7fcab99a1d52\") " pod="metallb-system/controller-f8648f98b-fw2c2" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.257312 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.345742 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-fw2c2" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.627180 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6b66905-7716-42b4-94a6-4de28876a7d6-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-6w6ng\" (UID: \"b6b66905-7716-42b4-94a6-4de28876a7d6\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.632420 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6b66905-7716-42b4-94a6-4de28876a7d6-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-6w6ng\" (UID: \"b6b66905-7716-42b4-94a6-4de28876a7d6\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.722043 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-fw2c2"] Dec 03 12:36:49 crc kubenswrapper[4849]: W1203 12:36:49.725434 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod069cef18_23c1_4c6f_b0e0_7fcab99a1d52.slice/crio-2aec626552652f7dbfe84fe2fc30fbc1009056707625d59e721cdde12449ea3b WatchSource:0}: Error finding container 2aec626552652f7dbfe84fe2fc30fbc1009056707625d59e721cdde12449ea3b: Status 404 returned error can't find the container with id 2aec626552652f7dbfe84fe2fc30fbc1009056707625d59e721cdde12449ea3b Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.728135 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-memberlist\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " pod="metallb-system/speaker-dchnp" Dec 03 12:36:49 crc kubenswrapper[4849]: E1203 12:36:49.728251 4849 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 12:36:49 crc kubenswrapper[4849]: E1203 12:36:49.728328 4849 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-memberlist podName:84e9fe4d-6c68-464b-83f4-ba0889ae9b73 nodeName:}" failed. No retries permitted until 2025-12-03 12:36:50.728313044 +0000 UTC m=+957.190160827 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-memberlist") pod "speaker-dchnp" (UID: "84e9fe4d-6c68-464b-83f4-ba0889ae9b73") : secret "metallb-memberlist" not found Dec 03 12:36:49 crc kubenswrapper[4849]: I1203 12:36:49.876112 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" Dec 03 12:36:50 crc kubenswrapper[4849]: I1203 12:36:50.046170 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-fw2c2" event={"ID":"069cef18-23c1-4c6f-b0e0-7fcab99a1d52","Type":"ContainerStarted","Data":"269570a70f920d81d8509b3c194f662b5e8a975f7d2ade3d4a90474ce30db40b"} Dec 03 12:36:50 crc kubenswrapper[4849]: I1203 12:36:50.046213 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-fw2c2" event={"ID":"069cef18-23c1-4c6f-b0e0-7fcab99a1d52","Type":"ContainerStarted","Data":"a4fde16024dafaa757f5b8481e221e127800050a8e941d83e7219517fcba86ce"} Dec 03 12:36:50 crc kubenswrapper[4849]: I1203 12:36:50.046222 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-fw2c2" event={"ID":"069cef18-23c1-4c6f-b0e0-7fcab99a1d52","Type":"ContainerStarted","Data":"2aec626552652f7dbfe84fe2fc30fbc1009056707625d59e721cdde12449ea3b"} Dec 03 12:36:50 crc kubenswrapper[4849]: I1203 12:36:50.047123 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-fw2c2" Dec 03 12:36:50 crc kubenswrapper[4849]: I1203 12:36:50.052011 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-kgj8v" event={"ID":"c6d0dda9-8382-472b-903b-8664e57f3fc5","Type":"ContainerStarted","Data":"1fdb27bc7729862d9be37de4e24db1ae5f4025435d2c66f1b940748f6e98de0a"} Dec 03 12:36:50 crc kubenswrapper[4849]: I1203 12:36:50.080142 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-fw2c2" podStartSLOduration=1.080128258 podStartE2EDuration="1.080128258s" podCreationTimestamp="2025-12-03 12:36:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:36:50.077244386 +0000 UTC m=+956.539092169" watchObservedRunningTime="2025-12-03 12:36:50.080128258 +0000 UTC m=+956.541976041" Dec 03 12:36:50 crc kubenswrapper[4849]: I1203 12:36:50.220866 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng"] Dec 03 12:36:50 crc kubenswrapper[4849]: W1203 12:36:50.222984 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6b66905_7716_42b4_94a6_4de28876a7d6.slice/crio-87017ba66d2f3466442336e26628d889542f16bfd20654a03072fd2a8e6c053b WatchSource:0}: Error finding container 87017ba66d2f3466442336e26628d889542f16bfd20654a03072fd2a8e6c053b: Status 404 returned error can't find the container with id 87017ba66d2f3466442336e26628d889542f16bfd20654a03072fd2a8e6c053b Dec 03 12:36:50 crc kubenswrapper[4849]: I1203 12:36:50.744325 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-memberlist\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " pod="metallb-system/speaker-dchnp" Dec 03 12:36:50 crc 
kubenswrapper[4849]: I1203 12:36:50.749063 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/84e9fe4d-6c68-464b-83f4-ba0889ae9b73-memberlist\") pod \"speaker-dchnp\" (UID: \"84e9fe4d-6c68-464b-83f4-ba0889ae9b73\") " pod="metallb-system/speaker-dchnp" Dec 03 12:36:50 crc kubenswrapper[4849]: I1203 12:36:50.838777 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-dchnp" Dec 03 12:36:50 crc kubenswrapper[4849]: W1203 12:36:50.859243 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84e9fe4d_6c68_464b_83f4_ba0889ae9b73.slice/crio-afbc95ab760f6514de1c55df2ba82e413a4cbc09939914b2ef9320babe6a70fa WatchSource:0}: Error finding container afbc95ab760f6514de1c55df2ba82e413a4cbc09939914b2ef9320babe6a70fa: Status 404 returned error can't find the container with id afbc95ab760f6514de1c55df2ba82e413a4cbc09939914b2ef9320babe6a70fa Dec 03 12:36:51 crc kubenswrapper[4849]: I1203 12:36:51.061697 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" event={"ID":"b6b66905-7716-42b4-94a6-4de28876a7d6","Type":"ContainerStarted","Data":"87017ba66d2f3466442336e26628d889542f16bfd20654a03072fd2a8e6c053b"} Dec 03 12:36:51 crc kubenswrapper[4849]: I1203 12:36:51.063059 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-dchnp" event={"ID":"84e9fe4d-6c68-464b-83f4-ba0889ae9b73","Type":"ContainerStarted","Data":"90f9954d344b32a643bb5ef171f6354bba91fa73afa134b49536664ab1383be5"} Dec 03 12:36:51 crc kubenswrapper[4849]: I1203 12:36:51.063077 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-dchnp" event={"ID":"84e9fe4d-6c68-464b-83f4-ba0889ae9b73","Type":"ContainerStarted","Data":"afbc95ab760f6514de1c55df2ba82e413a4cbc09939914b2ef9320babe6a70fa"} Dec 03 12:36:52 crc kubenswrapper[4849]: I1203 12:36:52.086427 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-dchnp" event={"ID":"84e9fe4d-6c68-464b-83f4-ba0889ae9b73","Type":"ContainerStarted","Data":"f80785132f569cd949879d6c89b709e915820ffd495baa9c4d7569bed5c876b3"} Dec 03 12:36:52 crc kubenswrapper[4849]: I1203 12:36:52.103305 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-dchnp" podStartSLOduration=4.103289889 podStartE2EDuration="4.103289889s" podCreationTimestamp="2025-12-03 12:36:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 12:36:52.101896218 +0000 UTC m=+958.563744002" watchObservedRunningTime="2025-12-03 12:36:52.103289889 +0000 UTC m=+958.565137672" Dec 03 12:36:53 crc kubenswrapper[4849]: I1203 12:36:53.097238 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-dchnp" Dec 03 12:36:56 crc kubenswrapper[4849]: I1203 12:36:56.115576 4849 generic.go:334] "Generic (PLEG): container finished" podID="c6d0dda9-8382-472b-903b-8664e57f3fc5" containerID="7d45e8ab9bce5b9570594e1894b3ffbbce04e95978349ec6c650b5b35b234b88" exitCode=0 Dec 03 12:36:56 crc kubenswrapper[4849]: I1203 12:36:56.115775 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-kgj8v" 
event={"ID":"c6d0dda9-8382-472b-903b-8664e57f3fc5","Type":"ContainerDied","Data":"7d45e8ab9bce5b9570594e1894b3ffbbce04e95978349ec6c650b5b35b234b88"} Dec 03 12:36:56 crc kubenswrapper[4849]: I1203 12:36:56.117747 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" event={"ID":"b6b66905-7716-42b4-94a6-4de28876a7d6","Type":"ContainerStarted","Data":"9503895db91e9f7c1ea40d885429ec6155d750e2e7485c7c4d659ef50f0a6ec5"} Dec 03 12:36:56 crc kubenswrapper[4849]: I1203 12:36:56.118103 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" Dec 03 12:36:56 crc kubenswrapper[4849]: I1203 12:36:56.143768 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" podStartSLOduration=2.82618394 podStartE2EDuration="8.143751982s" podCreationTimestamp="2025-12-03 12:36:48 +0000 UTC" firstStartedPulling="2025-12-03 12:36:50.224984114 +0000 UTC m=+956.686831897" lastFinishedPulling="2025-12-03 12:36:55.542552155 +0000 UTC m=+962.004399939" observedRunningTime="2025-12-03 12:36:56.141266369 +0000 UTC m=+962.603114162" watchObservedRunningTime="2025-12-03 12:36:56.143751982 +0000 UTC m=+962.605599765" Dec 03 12:36:57 crc kubenswrapper[4849]: I1203 12:36:57.124607 4849 generic.go:334] "Generic (PLEG): container finished" podID="c6d0dda9-8382-472b-903b-8664e57f3fc5" containerID="d25609772ca8f29a86d76cdf9e06327bb757c730a8ea1cba7167226b174118b1" exitCode=0 Dec 03 12:36:57 crc kubenswrapper[4849]: I1203 12:36:57.124683 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-kgj8v" event={"ID":"c6d0dda9-8382-472b-903b-8664e57f3fc5","Type":"ContainerDied","Data":"d25609772ca8f29a86d76cdf9e06327bb757c730a8ea1cba7167226b174118b1"} Dec 03 12:36:58 crc kubenswrapper[4849]: I1203 12:36:58.131692 4849 generic.go:334] "Generic (PLEG): container finished" podID="c6d0dda9-8382-472b-903b-8664e57f3fc5" containerID="48076d73bb9b18ce85362d6f3b4aa5c8645a9675655d923a2a6f5e725b0b5026" exitCode=0 Dec 03 12:36:58 crc kubenswrapper[4849]: I1203 12:36:58.131798 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-kgj8v" event={"ID":"c6d0dda9-8382-472b-903b-8664e57f3fc5","Type":"ContainerDied","Data":"48076d73bb9b18ce85362d6f3b4aa5c8645a9675655d923a2a6f5e725b0b5026"} Dec 03 12:36:59 crc kubenswrapper[4849]: I1203 12:36:59.139919 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-kgj8v" event={"ID":"c6d0dda9-8382-472b-903b-8664e57f3fc5","Type":"ContainerStarted","Data":"c2975c98d311cbc489a7e487007f109df32232eaf0b57bb2f0cc646962400b8e"} Dec 03 12:36:59 crc kubenswrapper[4849]: I1203 12:36:59.140121 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-kgj8v" event={"ID":"c6d0dda9-8382-472b-903b-8664e57f3fc5","Type":"ContainerStarted","Data":"d4e28c311aa58d400c2359555e64a3d023ec93a7d35ca6502c6b1e1a53c9c87e"} Dec 03 12:36:59 crc kubenswrapper[4849]: I1203 12:36:59.140131 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-kgj8v" event={"ID":"c6d0dda9-8382-472b-903b-8664e57f3fc5","Type":"ContainerStarted","Data":"bdd54ad720ee1e4640115b1341de92e61e2d912ed54c98282c4e7979cba975bb"} Dec 03 12:36:59 crc kubenswrapper[4849]: I1203 12:36:59.140139 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-kgj8v" 
event={"ID":"c6d0dda9-8382-472b-903b-8664e57f3fc5","Type":"ContainerStarted","Data":"6a97f7af00cc6f204def89ab92d047eaf61e3ce51f862f4d29277f181bd9e6ca"} Dec 03 12:36:59 crc kubenswrapper[4849]: I1203 12:36:59.140146 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-kgj8v" event={"ID":"c6d0dda9-8382-472b-903b-8664e57f3fc5","Type":"ContainerStarted","Data":"4acbc8be2335105bae8f8fa598483c64b9ca581da524cb6094e837b3b8e50bdc"} Dec 03 12:36:59 crc kubenswrapper[4849]: I1203 12:36:59.140155 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-kgj8v" event={"ID":"c6d0dda9-8382-472b-903b-8664e57f3fc5","Type":"ContainerStarted","Data":"e4aee0e9020a92ed83559ba2b18bd3f69c5357e059fe0b8751c9a0d9c0119c3c"} Dec 03 12:36:59 crc kubenswrapper[4849]: I1203 12:36:59.140767 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:59 crc kubenswrapper[4849]: I1203 12:36:59.157713 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-kgj8v" podStartSLOduration=4.951639485 podStartE2EDuration="11.157697554s" podCreationTimestamp="2025-12-03 12:36:48 +0000 UTC" firstStartedPulling="2025-12-03 12:36:49.353162356 +0000 UTC m=+955.815010139" lastFinishedPulling="2025-12-03 12:36:55.559220425 +0000 UTC m=+962.021068208" observedRunningTime="2025-12-03 12:36:59.154799305 +0000 UTC m=+965.616647088" watchObservedRunningTime="2025-12-03 12:36:59.157697554 +0000 UTC m=+965.619545338" Dec 03 12:36:59 crc kubenswrapper[4849]: I1203 12:36:59.257879 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:59 crc kubenswrapper[4849]: I1203 12:36:59.287192 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:36:59 crc kubenswrapper[4849]: I1203 12:36:59.349065 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-fw2c2" Dec 03 12:37:09 crc kubenswrapper[4849]: I1203 12:37:09.259604 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-kgj8v" Dec 03 12:37:09 crc kubenswrapper[4849]: I1203 12:37:09.881409 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-6w6ng" Dec 03 12:37:10 crc kubenswrapper[4849]: I1203 12:37:10.841910 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-dchnp" Dec 03 12:37:13 crc kubenswrapper[4849]: I1203 12:37:13.016215 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-4wbx5"] Dec 03 12:37:13 crc kubenswrapper[4849]: I1203 12:37:13.017129 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-4wbx5" Dec 03 12:37:13 crc kubenswrapper[4849]: I1203 12:37:13.018572 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 03 12:37:13 crc kubenswrapper[4849]: I1203 12:37:13.019661 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-llwd6" Dec 03 12:37:13 crc kubenswrapper[4849]: I1203 12:37:13.019894 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 03 12:37:13 crc kubenswrapper[4849]: I1203 12:37:13.030543 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4wbx5"] Dec 03 12:37:13 crc kubenswrapper[4849]: I1203 12:37:13.104629 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mqzf\" (UniqueName: \"kubernetes.io/projected/c4d44797-3cc0-418d-9dcb-caa5b681f781-kube-api-access-8mqzf\") pod \"openstack-operator-index-4wbx5\" (UID: \"c4d44797-3cc0-418d-9dcb-caa5b681f781\") " pod="openstack-operators/openstack-operator-index-4wbx5" Dec 03 12:37:13 crc kubenswrapper[4849]: I1203 12:37:13.206018 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mqzf\" (UniqueName: \"kubernetes.io/projected/c4d44797-3cc0-418d-9dcb-caa5b681f781-kube-api-access-8mqzf\") pod \"openstack-operator-index-4wbx5\" (UID: \"c4d44797-3cc0-418d-9dcb-caa5b681f781\") " pod="openstack-operators/openstack-operator-index-4wbx5" Dec 03 12:37:13 crc kubenswrapper[4849]: I1203 12:37:13.222664 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mqzf\" (UniqueName: \"kubernetes.io/projected/c4d44797-3cc0-418d-9dcb-caa5b681f781-kube-api-access-8mqzf\") pod \"openstack-operator-index-4wbx5\" (UID: \"c4d44797-3cc0-418d-9dcb-caa5b681f781\") " pod="openstack-operators/openstack-operator-index-4wbx5" Dec 03 12:37:13 crc kubenswrapper[4849]: I1203 12:37:13.345542 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-4wbx5" Dec 03 12:37:13 crc kubenswrapper[4849]: I1203 12:37:13.685828 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4wbx5"] Dec 03 12:37:13 crc kubenswrapper[4849]: W1203 12:37:13.688288 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4d44797_3cc0_418d_9dcb_caa5b681f781.slice/crio-47982a3b1018acd5b57d2b654ce0f41abca2e6fb9b07560e862b279db8a31446 WatchSource:0}: Error finding container 47982a3b1018acd5b57d2b654ce0f41abca2e6fb9b07560e862b279db8a31446: Status 404 returned error can't find the container with id 47982a3b1018acd5b57d2b654ce0f41abca2e6fb9b07560e862b279db8a31446 Dec 03 12:37:14 crc kubenswrapper[4849]: I1203 12:37:14.224232 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4wbx5" event={"ID":"c4d44797-3cc0-418d-9dcb-caa5b681f781","Type":"ContainerStarted","Data":"47982a3b1018acd5b57d2b654ce0f41abca2e6fb9b07560e862b279db8a31446"} Dec 03 12:37:16 crc kubenswrapper[4849]: I1203 12:37:16.394735 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-4wbx5"] Dec 03 12:37:17 crc kubenswrapper[4849]: I1203 12:37:17.000058 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-qjkcg"] Dec 03 12:37:17 crc kubenswrapper[4849]: I1203 12:37:17.000911 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-qjkcg" Dec 03 12:37:17 crc kubenswrapper[4849]: I1203 12:37:17.007358 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qjkcg"] Dec 03 12:37:17 crc kubenswrapper[4849]: I1203 12:37:17.166107 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdp7z\" (UniqueName: \"kubernetes.io/projected/d93bf9a7-2e41-4abf-9ec8-04480010f205-kube-api-access-vdp7z\") pod \"openstack-operator-index-qjkcg\" (UID: \"d93bf9a7-2e41-4abf-9ec8-04480010f205\") " pod="openstack-operators/openstack-operator-index-qjkcg" Dec 03 12:37:17 crc kubenswrapper[4849]: I1203 12:37:17.267197 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdp7z\" (UniqueName: \"kubernetes.io/projected/d93bf9a7-2e41-4abf-9ec8-04480010f205-kube-api-access-vdp7z\") pod \"openstack-operator-index-qjkcg\" (UID: \"d93bf9a7-2e41-4abf-9ec8-04480010f205\") " pod="openstack-operators/openstack-operator-index-qjkcg" Dec 03 12:37:17 crc kubenswrapper[4849]: I1203 12:37:17.282337 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdp7z\" (UniqueName: \"kubernetes.io/projected/d93bf9a7-2e41-4abf-9ec8-04480010f205-kube-api-access-vdp7z\") pod \"openstack-operator-index-qjkcg\" (UID: \"d93bf9a7-2e41-4abf-9ec8-04480010f205\") " pod="openstack-operators/openstack-operator-index-qjkcg" Dec 03 12:37:17 crc kubenswrapper[4849]: I1203 12:37:17.315479 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qjkcg" Dec 03 12:37:17 crc kubenswrapper[4849]: I1203 12:37:17.666281 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qjkcg"] Dec 03 12:37:17 crc kubenswrapper[4849]: W1203 12:37:17.669112 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd93bf9a7_2e41_4abf_9ec8_04480010f205.slice/crio-a466d0ee304b0e88fcf97e2658dc77f5f4c0f561f978f2ddda31e3a26e4a9139 WatchSource:0}: Error finding container a466d0ee304b0e88fcf97e2658dc77f5f4c0f561f978f2ddda31e3a26e4a9139: Status 404 returned error can't find the container with id a466d0ee304b0e88fcf97e2658dc77f5f4c0f561f978f2ddda31e3a26e4a9139 Dec 03 12:37:18 crc kubenswrapper[4849]: I1203 12:37:18.250368 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qjkcg" event={"ID":"d93bf9a7-2e41-4abf-9ec8-04480010f205","Type":"ContainerStarted","Data":"a466d0ee304b0e88fcf97e2658dc77f5f4c0f561f978f2ddda31e3a26e4a9139"} Dec 03 12:38:22 crc kubenswrapper[4849]: I1203 12:38:22.677151 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:38:22 crc kubenswrapper[4849]: I1203 12:38:22.678142 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:38:52 crc kubenswrapper[4849]: I1203 12:38:52.677338 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:38:52 crc kubenswrapper[4849]: I1203 12:38:52.677728 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:39:13 crc kubenswrapper[4849]: E1203 12:39:13.695933 4849 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:39:13 crc kubenswrapper[4849]: E1203 12:39:13.696307 4849 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": 
dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:39:13 crc kubenswrapper[4849]: E1203 12:39:13.696442 4849 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8mqzf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-4wbx5_openstack-operators(c4d44797-3cc0-418d-9dcb-caa5b681f781): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" logger="UnhandledError" Dec 03 12:39:13 crc kubenswrapper[4849]: E1203 12:39:13.698225 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \\\"http://38.102.83.155:5001/v2/\\\": dial tcp 38.102.83.155:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-4wbx5" podUID="c4d44797-3cc0-418d-9dcb-caa5b681f781" Dec 03 12:39:14 crc kubenswrapper[4849]: I1203 12:39:14.111861 4849 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack-operators/openstack-operator-index-4wbx5" Dec 03 12:39:14 crc kubenswrapper[4849]: I1203 12:39:14.177060 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mqzf\" (UniqueName: \"kubernetes.io/projected/c4d44797-3cc0-418d-9dcb-caa5b681f781-kube-api-access-8mqzf\") pod \"c4d44797-3cc0-418d-9dcb-caa5b681f781\" (UID: \"c4d44797-3cc0-418d-9dcb-caa5b681f781\") " Dec 03 12:39:14 crc kubenswrapper[4849]: I1203 12:39:14.182685 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4d44797-3cc0-418d-9dcb-caa5b681f781-kube-api-access-8mqzf" (OuterVolumeSpecName: "kube-api-access-8mqzf") pod "c4d44797-3cc0-418d-9dcb-caa5b681f781" (UID: "c4d44797-3cc0-418d-9dcb-caa5b681f781"). InnerVolumeSpecName "kube-api-access-8mqzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:39:14 crc kubenswrapper[4849]: I1203 12:39:14.280894 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mqzf\" (UniqueName: \"kubernetes.io/projected/c4d44797-3cc0-418d-9dcb-caa5b681f781-kube-api-access-8mqzf\") on node \"crc\" DevicePath \"\"" Dec 03 12:39:14 crc kubenswrapper[4849]: I1203 12:39:14.894454 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4wbx5" event={"ID":"c4d44797-3cc0-418d-9dcb-caa5b681f781","Type":"ContainerDied","Data":"47982a3b1018acd5b57d2b654ce0f41abca2e6fb9b07560e862b279db8a31446"} Dec 03 12:39:14 crc kubenswrapper[4849]: I1203 12:39:14.894508 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-4wbx5" Dec 03 12:39:14 crc kubenswrapper[4849]: I1203 12:39:14.927947 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-4wbx5"] Dec 03 12:39:14 crc kubenswrapper[4849]: I1203 12:39:14.932554 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-4wbx5"] Dec 03 12:39:15 crc kubenswrapper[4849]: I1203 12:39:15.864956 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4d44797-3cc0-418d-9dcb-caa5b681f781" path="/var/lib/kubelet/pods/c4d44797-3cc0-418d-9dcb-caa5b681f781/volumes" Dec 03 12:39:17 crc kubenswrapper[4849]: E1203 12:39:17.675538 4849 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:39:17 crc kubenswrapper[4849]: E1203 12:39:17.675740 4849 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:39:17 crc kubenswrapper[4849]: E1203 12:39:17.675872 4849 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:registry-server,Image:38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vdp7z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-qjkcg_openstack-operators(d93bf9a7-2e41-4abf-9ec8-04480010f205): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" logger="UnhandledError" Dec 03 12:39:17 crc kubenswrapper[4849]: E1203 12:39:17.677046 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \\\"http://38.102.83.155:5001/v2/\\\": dial tcp 38.102.83.155:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:39:17 crc kubenswrapper[4849]: E1203 12:39:17.915534 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" 
podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:39:22 crc kubenswrapper[4849]: I1203 12:39:22.677732 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:39:22 crc kubenswrapper[4849]: I1203 12:39:22.678037 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:39:22 crc kubenswrapper[4849]: I1203 12:39:22.678088 4849 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:39:22 crc kubenswrapper[4849]: I1203 12:39:22.678520 4849 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"84d48755608b0a99063f1f59d8a87643257a6aa58a070bcd6b4cc2f67b6ed2ec"} pod="openshift-machine-config-operator/machine-config-daemon-hszbg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:39:22 crc kubenswrapper[4849]: I1203 12:39:22.678573 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" containerID="cri-o://84d48755608b0a99063f1f59d8a87643257a6aa58a070bcd6b4cc2f67b6ed2ec" gracePeriod=600 Dec 03 12:39:22 crc kubenswrapper[4849]: I1203 12:39:22.940157 4849 generic.go:334] "Generic (PLEG): container finished" podID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerID="84d48755608b0a99063f1f59d8a87643257a6aa58a070bcd6b4cc2f67b6ed2ec" exitCode=0 Dec 03 12:39:22 crc kubenswrapper[4849]: I1203 12:39:22.940221 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerDied","Data":"84d48755608b0a99063f1f59d8a87643257a6aa58a070bcd6b4cc2f67b6ed2ec"} Dec 03 12:39:22 crc kubenswrapper[4849]: I1203 12:39:22.940398 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerStarted","Data":"17cfa50513f0f846330dabe7efc9363600e28fbd7db242167412f49b4da1f443"} Dec 03 12:39:22 crc kubenswrapper[4849]: I1203 12:39:22.940417 4849 scope.go:117] "RemoveContainer" containerID="a625210ce9ae49de7cb766c5c6d666ea81b94e9a45eb02fc64652a282b61fdfe" Dec 03 12:41:22 crc kubenswrapper[4849]: I1203 12:41:22.677666 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:41:22 crc kubenswrapper[4849]: I1203 12:41:22.678059 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:41:29 crc kubenswrapper[4849]: E1203 12:41:29.861073 4849 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:41:29 crc kubenswrapper[4849]: E1203 12:41:29.861373 4849 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:41:29 crc kubenswrapper[4849]: E1203 12:41:29.861473 4849 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vdp7z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
openstack-operator-index-qjkcg_openstack-operators(d93bf9a7-2e41-4abf-9ec8-04480010f205): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" logger="UnhandledError" Dec 03 12:41:29 crc kubenswrapper[4849]: E1203 12:41:29.862985 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \\\"http://38.102.83.155:5001/v2/\\\": dial tcp 38.102.83.155:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:41:42 crc kubenswrapper[4849]: E1203 12:41:42.858461 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:41:52 crc kubenswrapper[4849]: I1203 12:41:52.677046 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:41:52 crc kubenswrapper[4849]: I1203 12:41:52.677363 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:41:55 crc kubenswrapper[4849]: I1203 12:41:55.858357 4849 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 12:42:22 crc kubenswrapper[4849]: I1203 12:42:22.676623 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:42:22 crc kubenswrapper[4849]: I1203 12:42:22.676983 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:42:22 crc kubenswrapper[4849]: I1203 12:42:22.677025 4849 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:42:22 crc kubenswrapper[4849]: I1203 12:42:22.677426 4849 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"17cfa50513f0f846330dabe7efc9363600e28fbd7db242167412f49b4da1f443"} pod="openshift-machine-config-operator/machine-config-daemon-hszbg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:42:22 crc kubenswrapper[4849]: I1203 12:42:22.677473 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" containerID="cri-o://17cfa50513f0f846330dabe7efc9363600e28fbd7db242167412f49b4da1f443" gracePeriod=600 Dec 03 12:42:22 crc kubenswrapper[4849]: I1203 12:42:22.905005 4849 generic.go:334] "Generic (PLEG): container finished" podID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerID="17cfa50513f0f846330dabe7efc9363600e28fbd7db242167412f49b4da1f443" exitCode=0 Dec 03 12:42:22 crc kubenswrapper[4849]: I1203 12:42:22.905049 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerDied","Data":"17cfa50513f0f846330dabe7efc9363600e28fbd7db242167412f49b4da1f443"} Dec 03 12:42:22 crc kubenswrapper[4849]: I1203 12:42:22.905226 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerStarted","Data":"f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a"} Dec 03 12:42:22 crc kubenswrapper[4849]: I1203 12:42:22.905247 4849 scope.go:117] "RemoveContainer" containerID="84d48755608b0a99063f1f59d8a87643257a6aa58a070bcd6b4cc2f67b6ed2ec" Dec 03 12:43:55 crc kubenswrapper[4849]: E1203 12:43:55.861542 4849 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:43:55 crc kubenswrapper[4849]: E1203 12:43:55.861931 4849 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:43:55 crc kubenswrapper[4849]: E1203 12:43:55.862080 4849 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vdp7z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-qjkcg_openstack-operators(d93bf9a7-2e41-4abf-9ec8-04480010f205): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" logger="UnhandledError" Dec 03 12:43:55 crc kubenswrapper[4849]: E1203 12:43:55.863315 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \\\"http://38.102.83.155:5001/v2/\\\": dial tcp 38.102.83.155:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:44:07 crc kubenswrapper[4849]: E1203 12:44:07.857657 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:44:18 crc kubenswrapper[4849]: E1203 12:44:18.858597 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" 
pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:44:22 crc kubenswrapper[4849]: I1203 12:44:22.677829 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:44:22 crc kubenswrapper[4849]: I1203 12:44:22.678478 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:44:32 crc kubenswrapper[4849]: E1203 12:44:32.857913 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:44:52 crc kubenswrapper[4849]: I1203 12:44:52.678123 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:44:52 crc kubenswrapper[4849]: I1203 12:44:52.678459 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.129747 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568"] Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.131055 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.132285 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.133577 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.134826 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568"] Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.317953 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ba504793-5d26-4b32-8bb8-f279c8c34c23-secret-volume\") pod \"collect-profiles-29412765-6t568\" (UID: \"ba504793-5d26-4b32-8bb8-f279c8c34c23\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.318032 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42nm8\" (UniqueName: \"kubernetes.io/projected/ba504793-5d26-4b32-8bb8-f279c8c34c23-kube-api-access-42nm8\") pod \"collect-profiles-29412765-6t568\" (UID: \"ba504793-5d26-4b32-8bb8-f279c8c34c23\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.318321 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ba504793-5d26-4b32-8bb8-f279c8c34c23-config-volume\") pod \"collect-profiles-29412765-6t568\" (UID: \"ba504793-5d26-4b32-8bb8-f279c8c34c23\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.420369 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ba504793-5d26-4b32-8bb8-f279c8c34c23-secret-volume\") pod \"collect-profiles-29412765-6t568\" (UID: \"ba504793-5d26-4b32-8bb8-f279c8c34c23\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.420439 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42nm8\" (UniqueName: \"kubernetes.io/projected/ba504793-5d26-4b32-8bb8-f279c8c34c23-kube-api-access-42nm8\") pod \"collect-profiles-29412765-6t568\" (UID: \"ba504793-5d26-4b32-8bb8-f279c8c34c23\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.420541 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ba504793-5d26-4b32-8bb8-f279c8c34c23-config-volume\") pod \"collect-profiles-29412765-6t568\" (UID: \"ba504793-5d26-4b32-8bb8-f279c8c34c23\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.421563 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ba504793-5d26-4b32-8bb8-f279c8c34c23-config-volume\") pod 
\"collect-profiles-29412765-6t568\" (UID: \"ba504793-5d26-4b32-8bb8-f279c8c34c23\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.424801 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ba504793-5d26-4b32-8bb8-f279c8c34c23-secret-volume\") pod \"collect-profiles-29412765-6t568\" (UID: \"ba504793-5d26-4b32-8bb8-f279c8c34c23\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.433774 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42nm8\" (UniqueName: \"kubernetes.io/projected/ba504793-5d26-4b32-8bb8-f279c8c34c23-kube-api-access-42nm8\") pod \"collect-profiles-29412765-6t568\" (UID: \"ba504793-5d26-4b32-8bb8-f279c8c34c23\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.451920 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" Dec 03 12:45:00 crc kubenswrapper[4849]: I1203 12:45:00.801954 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568"] Dec 03 12:45:01 crc kubenswrapper[4849]: I1203 12:45:01.739369 4849 generic.go:334] "Generic (PLEG): container finished" podID="ba504793-5d26-4b32-8bb8-f279c8c34c23" containerID="9730e12d57462d9d27139fc741ac7ac21a7acd1a28b0e3bde6a391184a7fe0b4" exitCode=0 Dec 03 12:45:01 crc kubenswrapper[4849]: I1203 12:45:01.739417 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" event={"ID":"ba504793-5d26-4b32-8bb8-f279c8c34c23","Type":"ContainerDied","Data":"9730e12d57462d9d27139fc741ac7ac21a7acd1a28b0e3bde6a391184a7fe0b4"} Dec 03 12:45:01 crc kubenswrapper[4849]: I1203 12:45:01.739564 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" event={"ID":"ba504793-5d26-4b32-8bb8-f279c8c34c23","Type":"ContainerStarted","Data":"49dadec35045515c47c7818a4d2f8420152c2151e5c8f29404a3db3ae1de6ad2"} Dec 03 12:45:02 crc kubenswrapper[4849]: I1203 12:45:02.973342 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" Dec 03 12:45:03 crc kubenswrapper[4849]: I1203 12:45:03.159937 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ba504793-5d26-4b32-8bb8-f279c8c34c23-config-volume\") pod \"ba504793-5d26-4b32-8bb8-f279c8c34c23\" (UID: \"ba504793-5d26-4b32-8bb8-f279c8c34c23\") " Dec 03 12:45:03 crc kubenswrapper[4849]: I1203 12:45:03.159991 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42nm8\" (UniqueName: \"kubernetes.io/projected/ba504793-5d26-4b32-8bb8-f279c8c34c23-kube-api-access-42nm8\") pod \"ba504793-5d26-4b32-8bb8-f279c8c34c23\" (UID: \"ba504793-5d26-4b32-8bb8-f279c8c34c23\") " Dec 03 12:45:03 crc kubenswrapper[4849]: I1203 12:45:03.160027 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ba504793-5d26-4b32-8bb8-f279c8c34c23-secret-volume\") pod \"ba504793-5d26-4b32-8bb8-f279c8c34c23\" (UID: \"ba504793-5d26-4b32-8bb8-f279c8c34c23\") " Dec 03 12:45:03 crc kubenswrapper[4849]: I1203 12:45:03.160637 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba504793-5d26-4b32-8bb8-f279c8c34c23-config-volume" (OuterVolumeSpecName: "config-volume") pod "ba504793-5d26-4b32-8bb8-f279c8c34c23" (UID: "ba504793-5d26-4b32-8bb8-f279c8c34c23"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 12:45:03 crc kubenswrapper[4849]: I1203 12:45:03.161418 4849 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ba504793-5d26-4b32-8bb8-f279c8c34c23-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:45:03 crc kubenswrapper[4849]: I1203 12:45:03.165028 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba504793-5d26-4b32-8bb8-f279c8c34c23-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ba504793-5d26-4b32-8bb8-f279c8c34c23" (UID: "ba504793-5d26-4b32-8bb8-f279c8c34c23"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 12:45:03 crc kubenswrapper[4849]: I1203 12:45:03.165161 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba504793-5d26-4b32-8bb8-f279c8c34c23-kube-api-access-42nm8" (OuterVolumeSpecName: "kube-api-access-42nm8") pod "ba504793-5d26-4b32-8bb8-f279c8c34c23" (UID: "ba504793-5d26-4b32-8bb8-f279c8c34c23"). InnerVolumeSpecName "kube-api-access-42nm8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:45:03 crc kubenswrapper[4849]: I1203 12:45:03.263266 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42nm8\" (UniqueName: \"kubernetes.io/projected/ba504793-5d26-4b32-8bb8-f279c8c34c23-kube-api-access-42nm8\") on node \"crc\" DevicePath \"\"" Dec 03 12:45:03 crc kubenswrapper[4849]: I1203 12:45:03.263294 4849 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ba504793-5d26-4b32-8bb8-f279c8c34c23-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 12:45:03 crc kubenswrapper[4849]: I1203 12:45:03.751126 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" event={"ID":"ba504793-5d26-4b32-8bb8-f279c8c34c23","Type":"ContainerDied","Data":"49dadec35045515c47c7818a4d2f8420152c2151e5c8f29404a3db3ae1de6ad2"} Dec 03 12:45:03 crc kubenswrapper[4849]: I1203 12:45:03.751171 4849 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49dadec35045515c47c7818a4d2f8420152c2151e5c8f29404a3db3ae1de6ad2" Dec 03 12:45:03 crc kubenswrapper[4849]: I1203 12:45:03.751199 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412765-6t568" Dec 03 12:45:04 crc kubenswrapper[4849]: I1203 12:45:04.948839 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2jcp9"] Dec 03 12:45:04 crc kubenswrapper[4849]: E1203 12:45:04.949141 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba504793-5d26-4b32-8bb8-f279c8c34c23" containerName="collect-profiles" Dec 03 12:45:04 crc kubenswrapper[4849]: I1203 12:45:04.949154 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba504793-5d26-4b32-8bb8-f279c8c34c23" containerName="collect-profiles" Dec 03 12:45:04 crc kubenswrapper[4849]: I1203 12:45:04.949322 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba504793-5d26-4b32-8bb8-f279c8c34c23" containerName="collect-profiles" Dec 03 12:45:04 crc kubenswrapper[4849]: I1203 12:45:04.950306 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:04 crc kubenswrapper[4849]: I1203 12:45:04.958060 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2jcp9"] Dec 03 12:45:05 crc kubenswrapper[4849]: I1203 12:45:05.088632 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmwcb\" (UniqueName: \"kubernetes.io/projected/fa7f8c57-cfb5-4798-8634-f7a267adb53f-kube-api-access-kmwcb\") pod \"redhat-operators-2jcp9\" (UID: \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\") " pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:05 crc kubenswrapper[4849]: I1203 12:45:05.088838 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa7f8c57-cfb5-4798-8634-f7a267adb53f-utilities\") pod \"redhat-operators-2jcp9\" (UID: \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\") " pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:05 crc kubenswrapper[4849]: I1203 12:45:05.088910 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa7f8c57-cfb5-4798-8634-f7a267adb53f-catalog-content\") pod \"redhat-operators-2jcp9\" (UID: \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\") " pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:05 crc kubenswrapper[4849]: I1203 12:45:05.190613 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmwcb\" (UniqueName: \"kubernetes.io/projected/fa7f8c57-cfb5-4798-8634-f7a267adb53f-kube-api-access-kmwcb\") pod \"redhat-operators-2jcp9\" (UID: \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\") " pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:05 crc kubenswrapper[4849]: I1203 12:45:05.190735 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa7f8c57-cfb5-4798-8634-f7a267adb53f-utilities\") pod \"redhat-operators-2jcp9\" (UID: \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\") " pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:05 crc kubenswrapper[4849]: I1203 12:45:05.190781 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa7f8c57-cfb5-4798-8634-f7a267adb53f-catalog-content\") pod \"redhat-operators-2jcp9\" (UID: \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\") " pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:05 crc kubenswrapper[4849]: I1203 12:45:05.191149 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa7f8c57-cfb5-4798-8634-f7a267adb53f-utilities\") pod \"redhat-operators-2jcp9\" (UID: \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\") " pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:05 crc kubenswrapper[4849]: I1203 12:45:05.191152 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa7f8c57-cfb5-4798-8634-f7a267adb53f-catalog-content\") pod \"redhat-operators-2jcp9\" (UID: \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\") " pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:05 crc kubenswrapper[4849]: I1203 12:45:05.206020 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-kmwcb\" (UniqueName: \"kubernetes.io/projected/fa7f8c57-cfb5-4798-8634-f7a267adb53f-kube-api-access-kmwcb\") pod \"redhat-operators-2jcp9\" (UID: \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\") " pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:05 crc kubenswrapper[4849]: I1203 12:45:05.263017 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:05 crc kubenswrapper[4849]: I1203 12:45:05.675774 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2jcp9"] Dec 03 12:45:05 crc kubenswrapper[4849]: W1203 12:45:05.676978 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa7f8c57_cfb5_4798_8634_f7a267adb53f.slice/crio-d9d6ba278e504018f36a3ca86949db0fe319d63ed391ef003f47bb9b68990770 WatchSource:0}: Error finding container d9d6ba278e504018f36a3ca86949db0fe319d63ed391ef003f47bb9b68990770: Status 404 returned error can't find the container with id d9d6ba278e504018f36a3ca86949db0fe319d63ed391ef003f47bb9b68990770 Dec 03 12:45:05 crc kubenswrapper[4849]: I1203 12:45:05.763446 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jcp9" event={"ID":"fa7f8c57-cfb5-4798-8634-f7a267adb53f","Type":"ContainerStarted","Data":"d9d6ba278e504018f36a3ca86949db0fe319d63ed391ef003f47bb9b68990770"} Dec 03 12:45:06 crc kubenswrapper[4849]: I1203 12:45:06.770298 4849 generic.go:334] "Generic (PLEG): container finished" podID="fa7f8c57-cfb5-4798-8634-f7a267adb53f" containerID="604045088cb13e5a81ce2b00ecfd5c112c026bc11b5fa8cd962ab712c72b77e6" exitCode=0 Dec 03 12:45:06 crc kubenswrapper[4849]: I1203 12:45:06.770362 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jcp9" event={"ID":"fa7f8c57-cfb5-4798-8634-f7a267adb53f","Type":"ContainerDied","Data":"604045088cb13e5a81ce2b00ecfd5c112c026bc11b5fa8cd962ab712c72b77e6"} Dec 03 12:45:07 crc kubenswrapper[4849]: I1203 12:45:07.785016 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jcp9" event={"ID":"fa7f8c57-cfb5-4798-8634-f7a267adb53f","Type":"ContainerStarted","Data":"29a1f1dde9221defb0a537de2a8307a1146fc176a417cbb237889ce9f519fabf"} Dec 03 12:45:08 crc kubenswrapper[4849]: I1203 12:45:08.791816 4849 generic.go:334] "Generic (PLEG): container finished" podID="fa7f8c57-cfb5-4798-8634-f7a267adb53f" containerID="29a1f1dde9221defb0a537de2a8307a1146fc176a417cbb237889ce9f519fabf" exitCode=0 Dec 03 12:45:08 crc kubenswrapper[4849]: I1203 12:45:08.791858 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jcp9" event={"ID":"fa7f8c57-cfb5-4798-8634-f7a267adb53f","Type":"ContainerDied","Data":"29a1f1dde9221defb0a537de2a8307a1146fc176a417cbb237889ce9f519fabf"} Dec 03 12:45:09 crc kubenswrapper[4849]: I1203 12:45:09.798469 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jcp9" event={"ID":"fa7f8c57-cfb5-4798-8634-f7a267adb53f","Type":"ContainerStarted","Data":"e9184b76a6543276efbd5a54cd24faac6fc906218b6a49a0b16d0bfa93ff6852"} Dec 03 12:45:09 crc kubenswrapper[4849]: I1203 12:45:09.811903 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2jcp9" podStartSLOduration=3.338840204 podStartE2EDuration="5.811891047s" 
podCreationTimestamp="2025-12-03 12:45:04 +0000 UTC" firstStartedPulling="2025-12-03 12:45:06.771658634 +0000 UTC m=+1453.233506417" lastFinishedPulling="2025-12-03 12:45:09.244709477 +0000 UTC m=+1455.706557260" observedRunningTime="2025-12-03 12:45:09.809677374 +0000 UTC m=+1456.271525157" watchObservedRunningTime="2025-12-03 12:45:09.811891047 +0000 UTC m=+1456.273738830" Dec 03 12:45:15 crc kubenswrapper[4849]: I1203 12:45:15.263351 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:15 crc kubenswrapper[4849]: I1203 12:45:15.264365 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:15 crc kubenswrapper[4849]: I1203 12:45:15.295236 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:15 crc kubenswrapper[4849]: I1203 12:45:15.861783 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:15 crc kubenswrapper[4849]: I1203 12:45:15.893396 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2jcp9"] Dec 03 12:45:17 crc kubenswrapper[4849]: I1203 12:45:17.840569 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2jcp9" podUID="fa7f8c57-cfb5-4798-8634-f7a267adb53f" containerName="registry-server" containerID="cri-o://e9184b76a6543276efbd5a54cd24faac6fc906218b6a49a0b16d0bfa93ff6852" gracePeriod=2 Dec 03 12:45:18 crc kubenswrapper[4849]: I1203 12:45:18.852922 4849 generic.go:334] "Generic (PLEG): container finished" podID="fa7f8c57-cfb5-4798-8634-f7a267adb53f" containerID="e9184b76a6543276efbd5a54cd24faac6fc906218b6a49a0b16d0bfa93ff6852" exitCode=0 Dec 03 12:45:18 crc kubenswrapper[4849]: I1203 12:45:18.853086 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jcp9" event={"ID":"fa7f8c57-cfb5-4798-8634-f7a267adb53f","Type":"ContainerDied","Data":"e9184b76a6543276efbd5a54cd24faac6fc906218b6a49a0b16d0bfa93ff6852"} Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.255942 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.277826 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa7f8c57-cfb5-4798-8634-f7a267adb53f-catalog-content\") pod \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\" (UID: \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\") " Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.277891 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa7f8c57-cfb5-4798-8634-f7a267adb53f-utilities\") pod \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\" (UID: \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\") " Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.278016 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmwcb\" (UniqueName: \"kubernetes.io/projected/fa7f8c57-cfb5-4798-8634-f7a267adb53f-kube-api-access-kmwcb\") pod \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\" (UID: \"fa7f8c57-cfb5-4798-8634-f7a267adb53f\") " Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.278527 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa7f8c57-cfb5-4798-8634-f7a267adb53f-utilities" (OuterVolumeSpecName: "utilities") pod "fa7f8c57-cfb5-4798-8634-f7a267adb53f" (UID: "fa7f8c57-cfb5-4798-8634-f7a267adb53f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.279072 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa7f8c57-cfb5-4798-8634-f7a267adb53f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.283915 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa7f8c57-cfb5-4798-8634-f7a267adb53f-kube-api-access-kmwcb" (OuterVolumeSpecName: "kube-api-access-kmwcb") pod "fa7f8c57-cfb5-4798-8634-f7a267adb53f" (UID: "fa7f8c57-cfb5-4798-8634-f7a267adb53f"). InnerVolumeSpecName "kube-api-access-kmwcb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.358787 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa7f8c57-cfb5-4798-8634-f7a267adb53f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa7f8c57-cfb5-4798-8634-f7a267adb53f" (UID: "fa7f8c57-cfb5-4798-8634-f7a267adb53f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.380513 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa7f8c57-cfb5-4798-8634-f7a267adb53f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.380556 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmwcb\" (UniqueName: \"kubernetes.io/projected/fa7f8c57-cfb5-4798-8634-f7a267adb53f-kube-api-access-kmwcb\") on node \"crc\" DevicePath \"\"" Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.862797 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2jcp9" Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.863194 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jcp9" event={"ID":"fa7f8c57-cfb5-4798-8634-f7a267adb53f","Type":"ContainerDied","Data":"d9d6ba278e504018f36a3ca86949db0fe319d63ed391ef003f47bb9b68990770"} Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.863245 4849 scope.go:117] "RemoveContainer" containerID="e9184b76a6543276efbd5a54cd24faac6fc906218b6a49a0b16d0bfa93ff6852" Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.877210 4849 scope.go:117] "RemoveContainer" containerID="29a1f1dde9221defb0a537de2a8307a1146fc176a417cbb237889ce9f519fabf" Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.894425 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2jcp9"] Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.898391 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2jcp9"] Dec 03 12:45:19 crc kubenswrapper[4849]: I1203 12:45:19.914293 4849 scope.go:117] "RemoveContainer" containerID="604045088cb13e5a81ce2b00ecfd5c112c026bc11b5fa8cd962ab712c72b77e6" Dec 03 12:45:21 crc kubenswrapper[4849]: I1203 12:45:21.863213 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa7f8c57-cfb5-4798-8634-f7a267adb53f" path="/var/lib/kubelet/pods/fa7f8c57-cfb5-4798-8634-f7a267adb53f/volumes" Dec 03 12:45:22 crc kubenswrapper[4849]: I1203 12:45:22.677520 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:45:22 crc kubenswrapper[4849]: I1203 12:45:22.677768 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:45:22 crc kubenswrapper[4849]: I1203 12:45:22.677814 4849 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:45:22 crc kubenswrapper[4849]: I1203 12:45:22.678414 4849 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a"} pod="openshift-machine-config-operator/machine-config-daemon-hszbg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:45:22 crc kubenswrapper[4849]: I1203 12:45:22.678467 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" containerID="cri-o://f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" gracePeriod=600 Dec 03 12:45:22 crc kubenswrapper[4849]: E1203 12:45:22.798833 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:45:22 crc kubenswrapper[4849]: I1203 12:45:22.880845 4849 generic.go:334] "Generic (PLEG): container finished" podID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" exitCode=0 Dec 03 12:45:22 crc kubenswrapper[4849]: I1203 12:45:22.881416 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerDied","Data":"f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a"} Dec 03 12:45:22 crc kubenswrapper[4849]: I1203 12:45:22.881548 4849 scope.go:117] "RemoveContainer" containerID="17cfa50513f0f846330dabe7efc9363600e28fbd7db242167412f49b4da1f443" Dec 03 12:45:22 crc kubenswrapper[4849]: I1203 12:45:22.882164 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:45:22 crc kubenswrapper[4849]: E1203 12:45:22.882373 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.258565 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-w5cj2"] Dec 03 12:45:35 crc kubenswrapper[4849]: E1203 12:45:35.259199 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa7f8c57-cfb5-4798-8634-f7a267adb53f" containerName="extract-utilities" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.259211 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa7f8c57-cfb5-4798-8634-f7a267adb53f" containerName="extract-utilities" Dec 03 12:45:35 crc kubenswrapper[4849]: E1203 12:45:35.259221 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa7f8c57-cfb5-4798-8634-f7a267adb53f" containerName="registry-server" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.259226 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa7f8c57-cfb5-4798-8634-f7a267adb53f" containerName="registry-server" Dec 03 12:45:35 crc kubenswrapper[4849]: E1203 12:45:35.259240 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa7f8c57-cfb5-4798-8634-f7a267adb53f" containerName="extract-content" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.259246 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa7f8c57-cfb5-4798-8634-f7a267adb53f" containerName="extract-content" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.259375 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa7f8c57-cfb5-4798-8634-f7a267adb53f" containerName="registry-server" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.260290 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.271085 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w5cj2"] Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.290510 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-899wg\" (UniqueName: \"kubernetes.io/projected/f3df7896-fec1-4a7d-af34-53e34fd1a49f-kube-api-access-899wg\") pod \"redhat-marketplace-w5cj2\" (UID: \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\") " pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.290579 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3df7896-fec1-4a7d-af34-53e34fd1a49f-catalog-content\") pod \"redhat-marketplace-w5cj2\" (UID: \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\") " pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.290732 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3df7896-fec1-4a7d-af34-53e34fd1a49f-utilities\") pod \"redhat-marketplace-w5cj2\" (UID: \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\") " pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.392098 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-899wg\" (UniqueName: \"kubernetes.io/projected/f3df7896-fec1-4a7d-af34-53e34fd1a49f-kube-api-access-899wg\") pod \"redhat-marketplace-w5cj2\" (UID: \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\") " pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.392190 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3df7896-fec1-4a7d-af34-53e34fd1a49f-catalog-content\") pod \"redhat-marketplace-w5cj2\" (UID: \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\") " pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.392327 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3df7896-fec1-4a7d-af34-53e34fd1a49f-utilities\") pod \"redhat-marketplace-w5cj2\" (UID: \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\") " pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.392635 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3df7896-fec1-4a7d-af34-53e34fd1a49f-catalog-content\") pod \"redhat-marketplace-w5cj2\" (UID: \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\") " pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.392699 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3df7896-fec1-4a7d-af34-53e34fd1a49f-utilities\") pod \"redhat-marketplace-w5cj2\" (UID: \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\") " pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.406911 4849 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-899wg\" (UniqueName: \"kubernetes.io/projected/f3df7896-fec1-4a7d-af34-53e34fd1a49f-kube-api-access-899wg\") pod \"redhat-marketplace-w5cj2\" (UID: \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\") " pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.573808 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.945917 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w5cj2"] Dec 03 12:45:35 crc kubenswrapper[4849]: I1203 12:45:35.961316 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5cj2" event={"ID":"f3df7896-fec1-4a7d-af34-53e34fd1a49f","Type":"ContainerStarted","Data":"0ac982858258705d15262bd0b4f398775ca0e8bdf68c9cb8170db0e7e6540921"} Dec 03 12:45:36 crc kubenswrapper[4849]: I1203 12:45:36.967614 4849 generic.go:334] "Generic (PLEG): container finished" podID="f3df7896-fec1-4a7d-af34-53e34fd1a49f" containerID="ab27c834a0602f183357cf59ec976320a7863295bd4a169fc9b52c09547011f7" exitCode=0 Dec 03 12:45:36 crc kubenswrapper[4849]: I1203 12:45:36.967712 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5cj2" event={"ID":"f3df7896-fec1-4a7d-af34-53e34fd1a49f","Type":"ContainerDied","Data":"ab27c834a0602f183357cf59ec976320a7863295bd4a169fc9b52c09547011f7"} Dec 03 12:45:37 crc kubenswrapper[4849]: I1203 12:45:37.856666 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:45:37 crc kubenswrapper[4849]: E1203 12:45:37.857102 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:45:37 crc kubenswrapper[4849]: I1203 12:45:37.975504 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5cj2" event={"ID":"f3df7896-fec1-4a7d-af34-53e34fd1a49f","Type":"ContainerStarted","Data":"3ceb7fa5de1d085e525cac326c40dc1df33295d427c685059fbe3a910b7db5cd"} Dec 03 12:45:38 crc kubenswrapper[4849]: I1203 12:45:38.982569 4849 generic.go:334] "Generic (PLEG): container finished" podID="f3df7896-fec1-4a7d-af34-53e34fd1a49f" containerID="3ceb7fa5de1d085e525cac326c40dc1df33295d427c685059fbe3a910b7db5cd" exitCode=0 Dec 03 12:45:38 crc kubenswrapper[4849]: I1203 12:45:38.982604 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5cj2" event={"ID":"f3df7896-fec1-4a7d-af34-53e34fd1a49f","Type":"ContainerDied","Data":"3ceb7fa5de1d085e525cac326c40dc1df33295d427c685059fbe3a910b7db5cd"} Dec 03 12:45:39 crc kubenswrapper[4849]: I1203 12:45:39.990344 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5cj2" event={"ID":"f3df7896-fec1-4a7d-af34-53e34fd1a49f","Type":"ContainerStarted","Data":"98cd0a2673cae6cc4b218882c5fe21af13591184c87eb0e04cb627fbe32f392a"} Dec 03 12:45:40 crc kubenswrapper[4849]: I1203 12:45:40.006789 4849 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-marketplace/redhat-marketplace-w5cj2" podStartSLOduration=2.560545758 podStartE2EDuration="5.006776216s" podCreationTimestamp="2025-12-03 12:45:35 +0000 UTC" firstStartedPulling="2025-12-03 12:45:36.968958295 +0000 UTC m=+1483.430806077" lastFinishedPulling="2025-12-03 12:45:39.415188751 +0000 UTC m=+1485.877036535" observedRunningTime="2025-12-03 12:45:40.002409925 +0000 UTC m=+1486.464257707" watchObservedRunningTime="2025-12-03 12:45:40.006776216 +0000 UTC m=+1486.468623998" Dec 03 12:45:45 crc kubenswrapper[4849]: I1203 12:45:45.574015 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:45 crc kubenswrapper[4849]: I1203 12:45:45.574346 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:45 crc kubenswrapper[4849]: I1203 12:45:45.603541 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:46 crc kubenswrapper[4849]: I1203 12:45:46.048079 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:46 crc kubenswrapper[4849]: I1203 12:45:46.080791 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w5cj2"] Dec 03 12:45:48 crc kubenswrapper[4849]: I1203 12:45:48.032869 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-w5cj2" podUID="f3df7896-fec1-4a7d-af34-53e34fd1a49f" containerName="registry-server" containerID="cri-o://98cd0a2673cae6cc4b218882c5fe21af13591184c87eb0e04cb627fbe32f392a" gracePeriod=2 Dec 03 12:45:48 crc kubenswrapper[4849]: I1203 12:45:48.367053 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:48 crc kubenswrapper[4849]: I1203 12:45:48.472489 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-899wg\" (UniqueName: \"kubernetes.io/projected/f3df7896-fec1-4a7d-af34-53e34fd1a49f-kube-api-access-899wg\") pod \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\" (UID: \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\") " Dec 03 12:45:48 crc kubenswrapper[4849]: I1203 12:45:48.472620 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3df7896-fec1-4a7d-af34-53e34fd1a49f-catalog-content\") pod \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\" (UID: \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\") " Dec 03 12:45:48 crc kubenswrapper[4849]: I1203 12:45:48.472827 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3df7896-fec1-4a7d-af34-53e34fd1a49f-utilities\") pod \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\" (UID: \"f3df7896-fec1-4a7d-af34-53e34fd1a49f\") " Dec 03 12:45:48 crc kubenswrapper[4849]: I1203 12:45:48.473610 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3df7896-fec1-4a7d-af34-53e34fd1a49f-utilities" (OuterVolumeSpecName: "utilities") pod "f3df7896-fec1-4a7d-af34-53e34fd1a49f" (UID: "f3df7896-fec1-4a7d-af34-53e34fd1a49f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:45:48 crc kubenswrapper[4849]: I1203 12:45:48.477834 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3df7896-fec1-4a7d-af34-53e34fd1a49f-kube-api-access-899wg" (OuterVolumeSpecName: "kube-api-access-899wg") pod "f3df7896-fec1-4a7d-af34-53e34fd1a49f" (UID: "f3df7896-fec1-4a7d-af34-53e34fd1a49f"). InnerVolumeSpecName "kube-api-access-899wg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:45:48 crc kubenswrapper[4849]: I1203 12:45:48.488102 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3df7896-fec1-4a7d-af34-53e34fd1a49f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f3df7896-fec1-4a7d-af34-53e34fd1a49f" (UID: "f3df7896-fec1-4a7d-af34-53e34fd1a49f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:45:48 crc kubenswrapper[4849]: I1203 12:45:48.574393 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3df7896-fec1-4a7d-af34-53e34fd1a49f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:45:48 crc kubenswrapper[4849]: I1203 12:45:48.574421 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-899wg\" (UniqueName: \"kubernetes.io/projected/f3df7896-fec1-4a7d-af34-53e34fd1a49f-kube-api-access-899wg\") on node \"crc\" DevicePath \"\"" Dec 03 12:45:48 crc kubenswrapper[4849]: I1203 12:45:48.574432 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3df7896-fec1-4a7d-af34-53e34fd1a49f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.039678 4849 generic.go:334] "Generic (PLEG): container finished" podID="f3df7896-fec1-4a7d-af34-53e34fd1a49f" containerID="98cd0a2673cae6cc4b218882c5fe21af13591184c87eb0e04cb627fbe32f392a" exitCode=0 Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.039857 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5cj2" event={"ID":"f3df7896-fec1-4a7d-af34-53e34fd1a49f","Type":"ContainerDied","Data":"98cd0a2673cae6cc4b218882c5fe21af13591184c87eb0e04cb627fbe32f392a"} Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.040553 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5cj2" event={"ID":"f3df7896-fec1-4a7d-af34-53e34fd1a49f","Type":"ContainerDied","Data":"0ac982858258705d15262bd0b4f398775ca0e8bdf68c9cb8170db0e7e6540921"} Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.040625 4849 scope.go:117] "RemoveContainer" containerID="98cd0a2673cae6cc4b218882c5fe21af13591184c87eb0e04cb627fbe32f392a" Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.039949 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w5cj2" Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.062156 4849 scope.go:117] "RemoveContainer" containerID="3ceb7fa5de1d085e525cac326c40dc1df33295d427c685059fbe3a910b7db5cd" Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.066033 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w5cj2"] Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.081326 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-w5cj2"] Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.082999 4849 scope.go:117] "RemoveContainer" containerID="ab27c834a0602f183357cf59ec976320a7863295bd4a169fc9b52c09547011f7" Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.103757 4849 scope.go:117] "RemoveContainer" containerID="98cd0a2673cae6cc4b218882c5fe21af13591184c87eb0e04cb627fbe32f392a" Dec 03 12:45:49 crc kubenswrapper[4849]: E1203 12:45:49.104201 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98cd0a2673cae6cc4b218882c5fe21af13591184c87eb0e04cb627fbe32f392a\": container with ID starting with 98cd0a2673cae6cc4b218882c5fe21af13591184c87eb0e04cb627fbe32f392a not found: ID does not exist" containerID="98cd0a2673cae6cc4b218882c5fe21af13591184c87eb0e04cb627fbe32f392a" Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.104306 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98cd0a2673cae6cc4b218882c5fe21af13591184c87eb0e04cb627fbe32f392a"} err="failed to get container status \"98cd0a2673cae6cc4b218882c5fe21af13591184c87eb0e04cb627fbe32f392a\": rpc error: code = NotFound desc = could not find container \"98cd0a2673cae6cc4b218882c5fe21af13591184c87eb0e04cb627fbe32f392a\": container with ID starting with 98cd0a2673cae6cc4b218882c5fe21af13591184c87eb0e04cb627fbe32f392a not found: ID does not exist" Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.104384 4849 scope.go:117] "RemoveContainer" containerID="3ceb7fa5de1d085e525cac326c40dc1df33295d427c685059fbe3a910b7db5cd" Dec 03 12:45:49 crc kubenswrapper[4849]: E1203 12:45:49.104831 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ceb7fa5de1d085e525cac326c40dc1df33295d427c685059fbe3a910b7db5cd\": container with ID starting with 3ceb7fa5de1d085e525cac326c40dc1df33295d427c685059fbe3a910b7db5cd not found: ID does not exist" containerID="3ceb7fa5de1d085e525cac326c40dc1df33295d427c685059fbe3a910b7db5cd" Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.104866 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ceb7fa5de1d085e525cac326c40dc1df33295d427c685059fbe3a910b7db5cd"} err="failed to get container status \"3ceb7fa5de1d085e525cac326c40dc1df33295d427c685059fbe3a910b7db5cd\": rpc error: code = NotFound desc = could not find container \"3ceb7fa5de1d085e525cac326c40dc1df33295d427c685059fbe3a910b7db5cd\": container with ID starting with 3ceb7fa5de1d085e525cac326c40dc1df33295d427c685059fbe3a910b7db5cd not found: ID does not exist" Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.104900 4849 scope.go:117] "RemoveContainer" containerID="ab27c834a0602f183357cf59ec976320a7863295bd4a169fc9b52c09547011f7" Dec 03 12:45:49 crc kubenswrapper[4849]: E1203 12:45:49.105142 4849 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ab27c834a0602f183357cf59ec976320a7863295bd4a169fc9b52c09547011f7\": container with ID starting with ab27c834a0602f183357cf59ec976320a7863295bd4a169fc9b52c09547011f7 not found: ID does not exist" containerID="ab27c834a0602f183357cf59ec976320a7863295bd4a169fc9b52c09547011f7" Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.105217 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab27c834a0602f183357cf59ec976320a7863295bd4a169fc9b52c09547011f7"} err="failed to get container status \"ab27c834a0602f183357cf59ec976320a7863295bd4a169fc9b52c09547011f7\": rpc error: code = NotFound desc = could not find container \"ab27c834a0602f183357cf59ec976320a7863295bd4a169fc9b52c09547011f7\": container with ID starting with ab27c834a0602f183357cf59ec976320a7863295bd4a169fc9b52c09547011f7 not found: ID does not exist" Dec 03 12:45:49 crc kubenswrapper[4849]: I1203 12:45:49.862258 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3df7896-fec1-4a7d-af34-53e34fd1a49f" path="/var/lib/kubelet/pods/f3df7896-fec1-4a7d-af34-53e34fd1a49f/volumes" Dec 03 12:45:52 crc kubenswrapper[4849]: I1203 12:45:52.856079 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:45:52 crc kubenswrapper[4849]: E1203 12:45:52.856592 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:46:07 crc kubenswrapper[4849]: I1203 12:46:07.858136 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:46:07 crc kubenswrapper[4849]: E1203 12:46:07.873855 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:46:18 crc kubenswrapper[4849]: I1203 12:46:18.856853 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:46:18 crc kubenswrapper[4849]: E1203 12:46:18.857416 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:46:29 crc kubenswrapper[4849]: I1203 12:46:29.856461 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:46:29 crc kubenswrapper[4849]: E1203 12:46:29.857865 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:46:43 crc kubenswrapper[4849]: I1203 12:46:43.859624 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:46:43 crc kubenswrapper[4849]: E1203 12:46:43.860255 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:46:45 crc kubenswrapper[4849]: E1203 12:46:45.862611 4849 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:46:45 crc kubenswrapper[4849]: E1203 12:46:45.862947 4849 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:46:45 crc kubenswrapper[4849]: E1203 12:46:45.863062 4849 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vdp7z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe 
-addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-qjkcg_openstack-operators(d93bf9a7-2e41-4abf-9ec8-04480010f205): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" logger="UnhandledError" Dec 03 12:46:45 crc kubenswrapper[4849]: E1203 12:46:45.864702 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \\\"http://38.102.83.155:5001/v2/\\\": dial tcp 38.102.83.155:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:46:55 crc kubenswrapper[4849]: I1203 12:46:55.857151 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:46:55 crc kubenswrapper[4849]: E1203 12:46:55.857663 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:46:56 crc kubenswrapper[4849]: E1203 12:46:56.857936 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.237993 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wblth"] Dec 03 12:47:07 crc kubenswrapper[4849]: E1203 12:47:07.238596 4849 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f3df7896-fec1-4a7d-af34-53e34fd1a49f" containerName="registry-server" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.238608 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3df7896-fec1-4a7d-af34-53e34fd1a49f" containerName="registry-server" Dec 03 12:47:07 crc kubenswrapper[4849]: E1203 12:47:07.238616 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3df7896-fec1-4a7d-af34-53e34fd1a49f" containerName="extract-content" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.238622 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3df7896-fec1-4a7d-af34-53e34fd1a49f" containerName="extract-content" Dec 03 12:47:07 crc kubenswrapper[4849]: E1203 12:47:07.238659 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3df7896-fec1-4a7d-af34-53e34fd1a49f" containerName="extract-utilities" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.238667 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3df7896-fec1-4a7d-af34-53e34fd1a49f" containerName="extract-utilities" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.238869 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3df7896-fec1-4a7d-af34-53e34fd1a49f" containerName="registry-server" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.239990 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.245368 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wblth"] Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.429969 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c7914aa-f3ec-4dce-abf0-8ae01571764e-catalog-content\") pod \"certified-operators-wblth\" (UID: \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\") " pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.430098 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c7914aa-f3ec-4dce-abf0-8ae01571764e-utilities\") pod \"certified-operators-wblth\" (UID: \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\") " pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.430134 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhjjh\" (UniqueName: \"kubernetes.io/projected/3c7914aa-f3ec-4dce-abf0-8ae01571764e-kube-api-access-jhjjh\") pod \"certified-operators-wblth\" (UID: \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\") " pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.531704 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c7914aa-f3ec-4dce-abf0-8ae01571764e-utilities\") pod \"certified-operators-wblth\" (UID: \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\") " pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.531755 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhjjh\" (UniqueName: \"kubernetes.io/projected/3c7914aa-f3ec-4dce-abf0-8ae01571764e-kube-api-access-jhjjh\") pod 
\"certified-operators-wblth\" (UID: \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\") " pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.531821 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c7914aa-f3ec-4dce-abf0-8ae01571764e-catalog-content\") pod \"certified-operators-wblth\" (UID: \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\") " pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.532208 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c7914aa-f3ec-4dce-abf0-8ae01571764e-utilities\") pod \"certified-operators-wblth\" (UID: \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\") " pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.532230 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c7914aa-f3ec-4dce-abf0-8ae01571764e-catalog-content\") pod \"certified-operators-wblth\" (UID: \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\") " pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.555269 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhjjh\" (UniqueName: \"kubernetes.io/projected/3c7914aa-f3ec-4dce-abf0-8ae01571764e-kube-api-access-jhjjh\") pod \"certified-operators-wblth\" (UID: \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\") " pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.556550 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:07 crc kubenswrapper[4849]: I1203 12:47:07.967789 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wblth"] Dec 03 12:47:08 crc kubenswrapper[4849]: I1203 12:47:08.482843 4849 generic.go:334] "Generic (PLEG): container finished" podID="3c7914aa-f3ec-4dce-abf0-8ae01571764e" containerID="320f66a53927ee3a1a8c5526129e082c9d311cb4c239b343841214a0a8f6e8bf" exitCode=0 Dec 03 12:47:08 crc kubenswrapper[4849]: I1203 12:47:08.482919 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wblth" event={"ID":"3c7914aa-f3ec-4dce-abf0-8ae01571764e","Type":"ContainerDied","Data":"320f66a53927ee3a1a8c5526129e082c9d311cb4c239b343841214a0a8f6e8bf"} Dec 03 12:47:08 crc kubenswrapper[4849]: I1203 12:47:08.483055 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wblth" event={"ID":"3c7914aa-f3ec-4dce-abf0-8ae01571764e","Type":"ContainerStarted","Data":"77c0a5796f2fb2a06a82a63c8ba8109bde45c7953cdc2f55b116377b293a7cf5"} Dec 03 12:47:08 crc kubenswrapper[4849]: I1203 12:47:08.484425 4849 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 12:47:09 crc kubenswrapper[4849]: I1203 12:47:09.494118 4849 generic.go:334] "Generic (PLEG): container finished" podID="3c7914aa-f3ec-4dce-abf0-8ae01571764e" containerID="0589ae289007038c0b5a617bee795262e7de5b5da9e1b5f5b8f67eeb7454c899" exitCode=0 Dec 03 12:47:09 crc kubenswrapper[4849]: I1203 12:47:09.494178 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wblth" event={"ID":"3c7914aa-f3ec-4dce-abf0-8ae01571764e","Type":"ContainerDied","Data":"0589ae289007038c0b5a617bee795262e7de5b5da9e1b5f5b8f67eeb7454c899"} Dec 03 12:47:09 crc kubenswrapper[4849]: I1203 12:47:09.856980 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:47:09 crc kubenswrapper[4849]: E1203 12:47:09.857188 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:47:09 crc kubenswrapper[4849]: E1203 12:47:09.857955 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:47:10 crc kubenswrapper[4849]: I1203 12:47:10.501492 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wblth" event={"ID":"3c7914aa-f3ec-4dce-abf0-8ae01571764e","Type":"ContainerStarted","Data":"03ccceb151daf1f0f7b7af94cb28a23ea73647c8f35b7a3ee4018055dc44987d"} Dec 03 12:47:10 crc kubenswrapper[4849]: I1203 12:47:10.514683 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wblth" 
podStartSLOduration=2.039306233 podStartE2EDuration="3.514668657s" podCreationTimestamp="2025-12-03 12:47:07 +0000 UTC" firstStartedPulling="2025-12-03 12:47:08.484203769 +0000 UTC m=+1574.946051551" lastFinishedPulling="2025-12-03 12:47:09.959566192 +0000 UTC m=+1576.421413975" observedRunningTime="2025-12-03 12:47:10.51215487 +0000 UTC m=+1576.974002653" watchObservedRunningTime="2025-12-03 12:47:10.514668657 +0000 UTC m=+1576.976516440" Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.125006 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9q8cp"] Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.126962 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.132064 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9q8cp"] Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.219547 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-utilities\") pod \"community-operators-9q8cp\" (UID: \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\") " pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.219932 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-catalog-content\") pod \"community-operators-9q8cp\" (UID: \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\") " pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.220044 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpt8x\" (UniqueName: \"kubernetes.io/projected/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-kube-api-access-bpt8x\") pod \"community-operators-9q8cp\" (UID: \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\") " pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.321673 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-catalog-content\") pod \"community-operators-9q8cp\" (UID: \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\") " pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.321731 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpt8x\" (UniqueName: \"kubernetes.io/projected/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-kube-api-access-bpt8x\") pod \"community-operators-9q8cp\" (UID: \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\") " pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.321789 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-utilities\") pod \"community-operators-9q8cp\" (UID: \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\") " pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.322182 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-catalog-content\") pod \"community-operators-9q8cp\" (UID: \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\") " pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.322266 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-utilities\") pod \"community-operators-9q8cp\" (UID: \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\") " pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.349052 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpt8x\" (UniqueName: \"kubernetes.io/projected/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-kube-api-access-bpt8x\") pod \"community-operators-9q8cp\" (UID: \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\") " pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.439988 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:13 crc kubenswrapper[4849]: I1203 12:47:13.793256 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9q8cp"] Dec 03 12:47:13 crc kubenswrapper[4849]: W1203 12:47:13.801485 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b2c6df9_d535_4e70_9c7a_8b1a383896e3.slice/crio-eff5a2b089bd1d447828858a569702c8f0152dc8e4110529d06caf4b1cf371e3 WatchSource:0}: Error finding container eff5a2b089bd1d447828858a569702c8f0152dc8e4110529d06caf4b1cf371e3: Status 404 returned error can't find the container with id eff5a2b089bd1d447828858a569702c8f0152dc8e4110529d06caf4b1cf371e3 Dec 03 12:47:14 crc kubenswrapper[4849]: I1203 12:47:14.523086 4849 generic.go:334] "Generic (PLEG): container finished" podID="9b2c6df9-d535-4e70-9c7a-8b1a383896e3" containerID="cc48925ae6c922e21693a34e0b4bdc680fa52008b6775cb2e4232b673759cc92" exitCode=0 Dec 03 12:47:14 crc kubenswrapper[4849]: I1203 12:47:14.523124 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9q8cp" event={"ID":"9b2c6df9-d535-4e70-9c7a-8b1a383896e3","Type":"ContainerDied","Data":"cc48925ae6c922e21693a34e0b4bdc680fa52008b6775cb2e4232b673759cc92"} Dec 03 12:47:14 crc kubenswrapper[4849]: I1203 12:47:14.523148 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9q8cp" event={"ID":"9b2c6df9-d535-4e70-9c7a-8b1a383896e3","Type":"ContainerStarted","Data":"eff5a2b089bd1d447828858a569702c8f0152dc8e4110529d06caf4b1cf371e3"} Dec 03 12:47:15 crc kubenswrapper[4849]: I1203 12:47:15.529740 4849 generic.go:334] "Generic (PLEG): container finished" podID="9b2c6df9-d535-4e70-9c7a-8b1a383896e3" containerID="5f67a40294d622b163804bd3465030be19293734d90e5d49be4ebf1c7d484fb7" exitCode=0 Dec 03 12:47:15 crc kubenswrapper[4849]: I1203 12:47:15.529822 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9q8cp" event={"ID":"9b2c6df9-d535-4e70-9c7a-8b1a383896e3","Type":"ContainerDied","Data":"5f67a40294d622b163804bd3465030be19293734d90e5d49be4ebf1c7d484fb7"} Dec 03 12:47:16 crc kubenswrapper[4849]: I1203 12:47:16.536180 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-9q8cp" event={"ID":"9b2c6df9-d535-4e70-9c7a-8b1a383896e3","Type":"ContainerStarted","Data":"53ccc826db1abcfa37300c23d35c053473e77d9ddeda1829e8e814cdbf5ed374"} Dec 03 12:47:16 crc kubenswrapper[4849]: I1203 12:47:16.549094 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9q8cp" podStartSLOduration=2.068597758 podStartE2EDuration="3.549082032s" podCreationTimestamp="2025-12-03 12:47:13 +0000 UTC" firstStartedPulling="2025-12-03 12:47:14.525117078 +0000 UTC m=+1580.986964860" lastFinishedPulling="2025-12-03 12:47:16.005601351 +0000 UTC m=+1582.467449134" observedRunningTime="2025-12-03 12:47:16.547758353 +0000 UTC m=+1583.009606136" watchObservedRunningTime="2025-12-03 12:47:16.549082032 +0000 UTC m=+1583.010929816" Dec 03 12:47:17 crc kubenswrapper[4849]: I1203 12:47:17.557312 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:17 crc kubenswrapper[4849]: I1203 12:47:17.557469 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:17 crc kubenswrapper[4849]: I1203 12:47:17.586556 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:18 crc kubenswrapper[4849]: I1203 12:47:18.575125 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:19 crc kubenswrapper[4849]: I1203 12:47:19.217662 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wblth"] Dec 03 12:47:20 crc kubenswrapper[4849]: I1203 12:47:20.560807 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wblth" podUID="3c7914aa-f3ec-4dce-abf0-8ae01571764e" containerName="registry-server" containerID="cri-o://03ccceb151daf1f0f7b7af94cb28a23ea73647c8f35b7a3ee4018055dc44987d" gracePeriod=2 Dec 03 12:47:20 crc kubenswrapper[4849]: I1203 12:47:20.856457 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:47:20 crc kubenswrapper[4849]: E1203 12:47:20.856786 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:47:20 crc kubenswrapper[4849]: I1203 12:47:20.894333 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:20 crc kubenswrapper[4849]: I1203 12:47:20.918600 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c7914aa-f3ec-4dce-abf0-8ae01571764e-catalog-content\") pod \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\" (UID: \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\") " Dec 03 12:47:20 crc kubenswrapper[4849]: I1203 12:47:20.918718 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhjjh\" (UniqueName: \"kubernetes.io/projected/3c7914aa-f3ec-4dce-abf0-8ae01571764e-kube-api-access-jhjjh\") pod \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\" (UID: \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\") " Dec 03 12:47:20 crc kubenswrapper[4849]: I1203 12:47:20.918799 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c7914aa-f3ec-4dce-abf0-8ae01571764e-utilities\") pod \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\" (UID: \"3c7914aa-f3ec-4dce-abf0-8ae01571764e\") " Dec 03 12:47:20 crc kubenswrapper[4849]: I1203 12:47:20.919361 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c7914aa-f3ec-4dce-abf0-8ae01571764e-utilities" (OuterVolumeSpecName: "utilities") pod "3c7914aa-f3ec-4dce-abf0-8ae01571764e" (UID: "3c7914aa-f3ec-4dce-abf0-8ae01571764e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:47:20 crc kubenswrapper[4849]: I1203 12:47:20.922384 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c7914aa-f3ec-4dce-abf0-8ae01571764e-kube-api-access-jhjjh" (OuterVolumeSpecName: "kube-api-access-jhjjh") pod "3c7914aa-f3ec-4dce-abf0-8ae01571764e" (UID: "3c7914aa-f3ec-4dce-abf0-8ae01571764e"). InnerVolumeSpecName "kube-api-access-jhjjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:47:20 crc kubenswrapper[4849]: I1203 12:47:20.951312 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c7914aa-f3ec-4dce-abf0-8ae01571764e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3c7914aa-f3ec-4dce-abf0-8ae01571764e" (UID: "3c7914aa-f3ec-4dce-abf0-8ae01571764e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.020179 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c7914aa-f3ec-4dce-abf0-8ae01571764e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.020203 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhjjh\" (UniqueName: \"kubernetes.io/projected/3c7914aa-f3ec-4dce-abf0-8ae01571764e-kube-api-access-jhjjh\") on node \"crc\" DevicePath \"\"" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.020214 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c7914aa-f3ec-4dce-abf0-8ae01571764e-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.567421 4849 generic.go:334] "Generic (PLEG): container finished" podID="3c7914aa-f3ec-4dce-abf0-8ae01571764e" containerID="03ccceb151daf1f0f7b7af94cb28a23ea73647c8f35b7a3ee4018055dc44987d" exitCode=0 Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.567457 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wblth" event={"ID":"3c7914aa-f3ec-4dce-abf0-8ae01571764e","Type":"ContainerDied","Data":"03ccceb151daf1f0f7b7af94cb28a23ea73647c8f35b7a3ee4018055dc44987d"} Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.567473 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wblth" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.567500 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wblth" event={"ID":"3c7914aa-f3ec-4dce-abf0-8ae01571764e","Type":"ContainerDied","Data":"77c0a5796f2fb2a06a82a63c8ba8109bde45c7953cdc2f55b116377b293a7cf5"} Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.567521 4849 scope.go:117] "RemoveContainer" containerID="03ccceb151daf1f0f7b7af94cb28a23ea73647c8f35b7a3ee4018055dc44987d" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.582431 4849 scope.go:117] "RemoveContainer" containerID="0589ae289007038c0b5a617bee795262e7de5b5da9e1b5f5b8f67eeb7454c899" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.591503 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wblth"] Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.596306 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wblth"] Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.613576 4849 scope.go:117] "RemoveContainer" containerID="320f66a53927ee3a1a8c5526129e082c9d311cb4c239b343841214a0a8f6e8bf" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.624198 4849 scope.go:117] "RemoveContainer" containerID="03ccceb151daf1f0f7b7af94cb28a23ea73647c8f35b7a3ee4018055dc44987d" Dec 03 12:47:21 crc kubenswrapper[4849]: E1203 12:47:21.624693 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03ccceb151daf1f0f7b7af94cb28a23ea73647c8f35b7a3ee4018055dc44987d\": container with ID starting with 03ccceb151daf1f0f7b7af94cb28a23ea73647c8f35b7a3ee4018055dc44987d not found: ID does not exist" containerID="03ccceb151daf1f0f7b7af94cb28a23ea73647c8f35b7a3ee4018055dc44987d" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.624749 
4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03ccceb151daf1f0f7b7af94cb28a23ea73647c8f35b7a3ee4018055dc44987d"} err="failed to get container status \"03ccceb151daf1f0f7b7af94cb28a23ea73647c8f35b7a3ee4018055dc44987d\": rpc error: code = NotFound desc = could not find container \"03ccceb151daf1f0f7b7af94cb28a23ea73647c8f35b7a3ee4018055dc44987d\": container with ID starting with 03ccceb151daf1f0f7b7af94cb28a23ea73647c8f35b7a3ee4018055dc44987d not found: ID does not exist" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.624771 4849 scope.go:117] "RemoveContainer" containerID="0589ae289007038c0b5a617bee795262e7de5b5da9e1b5f5b8f67eeb7454c899" Dec 03 12:47:21 crc kubenswrapper[4849]: E1203 12:47:21.625813 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0589ae289007038c0b5a617bee795262e7de5b5da9e1b5f5b8f67eeb7454c899\": container with ID starting with 0589ae289007038c0b5a617bee795262e7de5b5da9e1b5f5b8f67eeb7454c899 not found: ID does not exist" containerID="0589ae289007038c0b5a617bee795262e7de5b5da9e1b5f5b8f67eeb7454c899" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.625874 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0589ae289007038c0b5a617bee795262e7de5b5da9e1b5f5b8f67eeb7454c899"} err="failed to get container status \"0589ae289007038c0b5a617bee795262e7de5b5da9e1b5f5b8f67eeb7454c899\": rpc error: code = NotFound desc = could not find container \"0589ae289007038c0b5a617bee795262e7de5b5da9e1b5f5b8f67eeb7454c899\": container with ID starting with 0589ae289007038c0b5a617bee795262e7de5b5da9e1b5f5b8f67eeb7454c899 not found: ID does not exist" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.625895 4849 scope.go:117] "RemoveContainer" containerID="320f66a53927ee3a1a8c5526129e082c9d311cb4c239b343841214a0a8f6e8bf" Dec 03 12:47:21 crc kubenswrapper[4849]: E1203 12:47:21.626149 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"320f66a53927ee3a1a8c5526129e082c9d311cb4c239b343841214a0a8f6e8bf\": container with ID starting with 320f66a53927ee3a1a8c5526129e082c9d311cb4c239b343841214a0a8f6e8bf not found: ID does not exist" containerID="320f66a53927ee3a1a8c5526129e082c9d311cb4c239b343841214a0a8f6e8bf" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.626173 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"320f66a53927ee3a1a8c5526129e082c9d311cb4c239b343841214a0a8f6e8bf"} err="failed to get container status \"320f66a53927ee3a1a8c5526129e082c9d311cb4c239b343841214a0a8f6e8bf\": rpc error: code = NotFound desc = could not find container \"320f66a53927ee3a1a8c5526129e082c9d311cb4c239b343841214a0a8f6e8bf\": container with ID starting with 320f66a53927ee3a1a8c5526129e082c9d311cb4c239b343841214a0a8f6e8bf not found: ID does not exist" Dec 03 12:47:21 crc kubenswrapper[4849]: E1203 12:47:21.856842 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:47:21 crc kubenswrapper[4849]: I1203 12:47:21.862917 4849 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="3c7914aa-f3ec-4dce-abf0-8ae01571764e" path="/var/lib/kubelet/pods/3c7914aa-f3ec-4dce-abf0-8ae01571764e/volumes" Dec 03 12:47:23 crc kubenswrapper[4849]: I1203 12:47:23.440797 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:23 crc kubenswrapper[4849]: I1203 12:47:23.441015 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:23 crc kubenswrapper[4849]: I1203 12:47:23.470299 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:23 crc kubenswrapper[4849]: I1203 12:47:23.611139 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:24 crc kubenswrapper[4849]: I1203 12:47:24.618635 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9q8cp"] Dec 03 12:47:26 crc kubenswrapper[4849]: I1203 12:47:26.599307 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9q8cp" podUID="9b2c6df9-d535-4e70-9c7a-8b1a383896e3" containerName="registry-server" containerID="cri-o://53ccc826db1abcfa37300c23d35c053473e77d9ddeda1829e8e814cdbf5ed374" gracePeriod=2 Dec 03 12:47:26 crc kubenswrapper[4849]: I1203 12:47:26.929996 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:26 crc kubenswrapper[4849]: I1203 12:47:26.994469 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpt8x\" (UniqueName: \"kubernetes.io/projected/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-kube-api-access-bpt8x\") pod \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\" (UID: \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\") " Dec 03 12:47:26 crc kubenswrapper[4849]: I1203 12:47:26.994545 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-catalog-content\") pod \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\" (UID: \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\") " Dec 03 12:47:26 crc kubenswrapper[4849]: I1203 12:47:26.994608 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-utilities\") pod \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\" (UID: \"9b2c6df9-d535-4e70-9c7a-8b1a383896e3\") " Dec 03 12:47:26 crc kubenswrapper[4849]: I1203 12:47:26.995357 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-utilities" (OuterVolumeSpecName: "utilities") pod "9b2c6df9-d535-4e70-9c7a-8b1a383896e3" (UID: "9b2c6df9-d535-4e70-9c7a-8b1a383896e3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:47:26 crc kubenswrapper[4849]: I1203 12:47:26.999320 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-kube-api-access-bpt8x" (OuterVolumeSpecName: "kube-api-access-bpt8x") pod "9b2c6df9-d535-4e70-9c7a-8b1a383896e3" (UID: "9b2c6df9-d535-4e70-9c7a-8b1a383896e3"). InnerVolumeSpecName "kube-api-access-bpt8x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.031513 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b2c6df9-d535-4e70-9c7a-8b1a383896e3" (UID: "9b2c6df9-d535-4e70-9c7a-8b1a383896e3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.096726 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpt8x\" (UniqueName: \"kubernetes.io/projected/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-kube-api-access-bpt8x\") on node \"crc\" DevicePath \"\"" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.096757 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.096767 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b2c6df9-d535-4e70-9c7a-8b1a383896e3-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.606278 4849 generic.go:334] "Generic (PLEG): container finished" podID="9b2c6df9-d535-4e70-9c7a-8b1a383896e3" containerID="53ccc826db1abcfa37300c23d35c053473e77d9ddeda1829e8e814cdbf5ed374" exitCode=0 Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.606312 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9q8cp" event={"ID":"9b2c6df9-d535-4e70-9c7a-8b1a383896e3","Type":"ContainerDied","Data":"53ccc826db1abcfa37300c23d35c053473e77d9ddeda1829e8e814cdbf5ed374"} Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.606329 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9q8cp" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.606348 4849 scope.go:117] "RemoveContainer" containerID="53ccc826db1abcfa37300c23d35c053473e77d9ddeda1829e8e814cdbf5ed374" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.606336 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9q8cp" event={"ID":"9b2c6df9-d535-4e70-9c7a-8b1a383896e3","Type":"ContainerDied","Data":"eff5a2b089bd1d447828858a569702c8f0152dc8e4110529d06caf4b1cf371e3"} Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.625347 4849 scope.go:117] "RemoveContainer" containerID="5f67a40294d622b163804bd3465030be19293734d90e5d49be4ebf1c7d484fb7" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.644539 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9q8cp"] Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.645601 4849 scope.go:117] "RemoveContainer" containerID="cc48925ae6c922e21693a34e0b4bdc680fa52008b6775cb2e4232b673759cc92" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.650072 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9q8cp"] Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.662026 4849 scope.go:117] "RemoveContainer" containerID="53ccc826db1abcfa37300c23d35c053473e77d9ddeda1829e8e814cdbf5ed374" Dec 03 12:47:27 crc kubenswrapper[4849]: E1203 12:47:27.662371 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53ccc826db1abcfa37300c23d35c053473e77d9ddeda1829e8e814cdbf5ed374\": container with ID starting with 53ccc826db1abcfa37300c23d35c053473e77d9ddeda1829e8e814cdbf5ed374 not found: ID does not exist" containerID="53ccc826db1abcfa37300c23d35c053473e77d9ddeda1829e8e814cdbf5ed374" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.662399 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53ccc826db1abcfa37300c23d35c053473e77d9ddeda1829e8e814cdbf5ed374"} err="failed to get container status \"53ccc826db1abcfa37300c23d35c053473e77d9ddeda1829e8e814cdbf5ed374\": rpc error: code = NotFound desc = could not find container \"53ccc826db1abcfa37300c23d35c053473e77d9ddeda1829e8e814cdbf5ed374\": container with ID starting with 53ccc826db1abcfa37300c23d35c053473e77d9ddeda1829e8e814cdbf5ed374 not found: ID does not exist" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.662418 4849 scope.go:117] "RemoveContainer" containerID="5f67a40294d622b163804bd3465030be19293734d90e5d49be4ebf1c7d484fb7" Dec 03 12:47:27 crc kubenswrapper[4849]: E1203 12:47:27.662627 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f67a40294d622b163804bd3465030be19293734d90e5d49be4ebf1c7d484fb7\": container with ID starting with 5f67a40294d622b163804bd3465030be19293734d90e5d49be4ebf1c7d484fb7 not found: ID does not exist" containerID="5f67a40294d622b163804bd3465030be19293734d90e5d49be4ebf1c7d484fb7" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.662662 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f67a40294d622b163804bd3465030be19293734d90e5d49be4ebf1c7d484fb7"} err="failed to get container status \"5f67a40294d622b163804bd3465030be19293734d90e5d49be4ebf1c7d484fb7\": rpc error: code = NotFound desc = could not find 
container \"5f67a40294d622b163804bd3465030be19293734d90e5d49be4ebf1c7d484fb7\": container with ID starting with 5f67a40294d622b163804bd3465030be19293734d90e5d49be4ebf1c7d484fb7 not found: ID does not exist" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.662676 4849 scope.go:117] "RemoveContainer" containerID="cc48925ae6c922e21693a34e0b4bdc680fa52008b6775cb2e4232b673759cc92" Dec 03 12:47:27 crc kubenswrapper[4849]: E1203 12:47:27.662880 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc48925ae6c922e21693a34e0b4bdc680fa52008b6775cb2e4232b673759cc92\": container with ID starting with cc48925ae6c922e21693a34e0b4bdc680fa52008b6775cb2e4232b673759cc92 not found: ID does not exist" containerID="cc48925ae6c922e21693a34e0b4bdc680fa52008b6775cb2e4232b673759cc92" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.662904 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc48925ae6c922e21693a34e0b4bdc680fa52008b6775cb2e4232b673759cc92"} err="failed to get container status \"cc48925ae6c922e21693a34e0b4bdc680fa52008b6775cb2e4232b673759cc92\": rpc error: code = NotFound desc = could not find container \"cc48925ae6c922e21693a34e0b4bdc680fa52008b6775cb2e4232b673759cc92\": container with ID starting with cc48925ae6c922e21693a34e0b4bdc680fa52008b6775cb2e4232b673759cc92 not found: ID does not exist" Dec 03 12:47:27 crc kubenswrapper[4849]: I1203 12:47:27.863320 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b2c6df9-d535-4e70-9c7a-8b1a383896e3" path="/var/lib/kubelet/pods/9b2c6df9-d535-4e70-9c7a-8b1a383896e3/volumes" Dec 03 12:47:32 crc kubenswrapper[4849]: I1203 12:47:32.857225 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:47:32 crc kubenswrapper[4849]: E1203 12:47:32.857848 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:47:32 crc kubenswrapper[4849]: E1203 12:47:32.858335 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:47:43 crc kubenswrapper[4849]: I1203 12:47:43.860200 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:47:43 crc kubenswrapper[4849]: E1203 12:47:43.860796 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:47:45 crc kubenswrapper[4849]: E1203 12:47:45.857967 4849 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:47:57 crc kubenswrapper[4849]: I1203 12:47:57.856445 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:47:57 crc kubenswrapper[4849]: E1203 12:47:57.857419 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:48:00 crc kubenswrapper[4849]: E1203 12:48:00.858132 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:48:09 crc kubenswrapper[4849]: I1203 12:48:09.856353 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:48:09 crc kubenswrapper[4849]: E1203 12:48:09.856789 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:48:24 crc kubenswrapper[4849]: I1203 12:48:24.856844 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:48:24 crc kubenswrapper[4849]: E1203 12:48:24.857327 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:48:36 crc kubenswrapper[4849]: I1203 12:48:36.856568 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:48:36 crc kubenswrapper[4849]: E1203 12:48:36.857432 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:48:48 crc 
kubenswrapper[4849]: I1203 12:48:48.856531 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:48:48 crc kubenswrapper[4849]: E1203 12:48:48.857690 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:49:02 crc kubenswrapper[4849]: I1203 12:49:02.856723 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:49:02 crc kubenswrapper[4849]: E1203 12:49:02.857399 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:49:13 crc kubenswrapper[4849]: I1203 12:49:13.859342 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:49:13 crc kubenswrapper[4849]: E1203 12:49:13.859904 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:49:26 crc kubenswrapper[4849]: I1203 12:49:26.856343 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:49:26 crc kubenswrapper[4849]: E1203 12:49:26.856979 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:49:38 crc kubenswrapper[4849]: I1203 12:49:38.857076 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:49:38 crc kubenswrapper[4849]: E1203 12:49:38.857708 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:49:52 crc kubenswrapper[4849]: I1203 12:49:52.856844 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:49:52 crc 
kubenswrapper[4849]: E1203 12:49:52.858202 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:50:07 crc kubenswrapper[4849]: I1203 12:50:07.856396 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:50:07 crc kubenswrapper[4849]: E1203 12:50:07.856975 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:50:12 crc kubenswrapper[4849]: E1203 12:50:12.862490 4849 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:50:12 crc kubenswrapper[4849]: E1203 12:50:12.862674 4849 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:50:12 crc kubenswrapper[4849]: E1203 12:50:12.862778 4849 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vdp7z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe 
-addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-qjkcg_openstack-operators(d93bf9a7-2e41-4abf-9ec8-04480010f205): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" logger="UnhandledError" Dec 03 12:50:12 crc kubenswrapper[4849]: E1203 12:50:12.863910 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \\\"http://38.102.83.155:5001/v2/\\\": dial tcp 38.102.83.155:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:50:22 crc kubenswrapper[4849]: I1203 12:50:22.856387 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:50:23 crc kubenswrapper[4849]: I1203 12:50:23.536144 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerStarted","Data":"556d91c3e9f53fc71ca08795a69b85c762746f401d63304c8c04d60822b4d151"} Dec 03 12:50:24 crc kubenswrapper[4849]: E1203 12:50:24.858307 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:50:39 crc kubenswrapper[4849]: E1203 12:50:39.857890 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:50:50 crc 
kubenswrapper[4849]: E1203 12:50:50.857711 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:51:03 crc kubenswrapper[4849]: E1203 12:51:03.862653 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:51:14 crc kubenswrapper[4849]: E1203 12:51:14.858801 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:51:28 crc kubenswrapper[4849]: E1203 12:51:28.857503 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:51:42 crc kubenswrapper[4849]: E1203 12:51:42.857869 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:51:55 crc kubenswrapper[4849]: E1203 12:51:55.858901 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:52:10 crc kubenswrapper[4849]: E1203 12:52:10.858991 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:52:22 crc kubenswrapper[4849]: I1203 12:52:22.676972 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:52:22 crc kubenswrapper[4849]: I1203 12:52:22.677532 4849 prober.go:107] 
"Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:52:22 crc kubenswrapper[4849]: E1203 12:52:22.858587 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:52:35 crc kubenswrapper[4849]: E1203 12:52:35.857819 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.699360 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-qm425/must-gather-6gxtm"] Dec 03 12:52:46 crc kubenswrapper[4849]: E1203 12:52:46.700139 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b2c6df9-d535-4e70-9c7a-8b1a383896e3" containerName="extract-utilities" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.700151 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b2c6df9-d535-4e70-9c7a-8b1a383896e3" containerName="extract-utilities" Dec 03 12:52:46 crc kubenswrapper[4849]: E1203 12:52:46.700170 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c7914aa-f3ec-4dce-abf0-8ae01571764e" containerName="registry-server" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.700176 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c7914aa-f3ec-4dce-abf0-8ae01571764e" containerName="registry-server" Dec 03 12:52:46 crc kubenswrapper[4849]: E1203 12:52:46.700187 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c7914aa-f3ec-4dce-abf0-8ae01571764e" containerName="extract-utilities" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.700192 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c7914aa-f3ec-4dce-abf0-8ae01571764e" containerName="extract-utilities" Dec 03 12:52:46 crc kubenswrapper[4849]: E1203 12:52:46.700201 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b2c6df9-d535-4e70-9c7a-8b1a383896e3" containerName="extract-content" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.700206 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b2c6df9-d535-4e70-9c7a-8b1a383896e3" containerName="extract-content" Dec 03 12:52:46 crc kubenswrapper[4849]: E1203 12:52:46.700218 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b2c6df9-d535-4e70-9c7a-8b1a383896e3" containerName="registry-server" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.700223 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b2c6df9-d535-4e70-9c7a-8b1a383896e3" containerName="registry-server" Dec 03 12:52:46 crc kubenswrapper[4849]: E1203 12:52:46.700235 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c7914aa-f3ec-4dce-abf0-8ae01571764e" 
containerName="extract-content" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.700241 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c7914aa-f3ec-4dce-abf0-8ae01571764e" containerName="extract-content" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.700371 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b2c6df9-d535-4e70-9c7a-8b1a383896e3" containerName="registry-server" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.700382 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c7914aa-f3ec-4dce-abf0-8ae01571764e" containerName="registry-server" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.701070 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-qm425/must-gather-6gxtm" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.703411 4849 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-qm425"/"default-dockercfg-g6h4f" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.704930 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-qm425"/"openshift-service-ca.crt" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.709220 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-qm425/must-gather-6gxtm"] Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.715913 4849 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-qm425"/"kube-root-ca.crt" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.748877 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzjs6\" (UniqueName: \"kubernetes.io/projected/295678a6-02fd-49e6-aaed-5d31b0bdfe35-kube-api-access-fzjs6\") pod \"must-gather-6gxtm\" (UID: \"295678a6-02fd-49e6-aaed-5d31b0bdfe35\") " pod="openshift-must-gather-qm425/must-gather-6gxtm" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.749124 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/295678a6-02fd-49e6-aaed-5d31b0bdfe35-must-gather-output\") pod \"must-gather-6gxtm\" (UID: \"295678a6-02fd-49e6-aaed-5d31b0bdfe35\") " pod="openshift-must-gather-qm425/must-gather-6gxtm" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.850432 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzjs6\" (UniqueName: \"kubernetes.io/projected/295678a6-02fd-49e6-aaed-5d31b0bdfe35-kube-api-access-fzjs6\") pod \"must-gather-6gxtm\" (UID: \"295678a6-02fd-49e6-aaed-5d31b0bdfe35\") " pod="openshift-must-gather-qm425/must-gather-6gxtm" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.850676 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/295678a6-02fd-49e6-aaed-5d31b0bdfe35-must-gather-output\") pod \"must-gather-6gxtm\" (UID: \"295678a6-02fd-49e6-aaed-5d31b0bdfe35\") " pod="openshift-must-gather-qm425/must-gather-6gxtm" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.851073 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/295678a6-02fd-49e6-aaed-5d31b0bdfe35-must-gather-output\") pod \"must-gather-6gxtm\" (UID: \"295678a6-02fd-49e6-aaed-5d31b0bdfe35\") " 
pod="openshift-must-gather-qm425/must-gather-6gxtm" Dec 03 12:52:46 crc kubenswrapper[4849]: I1203 12:52:46.866487 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzjs6\" (UniqueName: \"kubernetes.io/projected/295678a6-02fd-49e6-aaed-5d31b0bdfe35-kube-api-access-fzjs6\") pod \"must-gather-6gxtm\" (UID: \"295678a6-02fd-49e6-aaed-5d31b0bdfe35\") " pod="openshift-must-gather-qm425/must-gather-6gxtm" Dec 03 12:52:47 crc kubenswrapper[4849]: I1203 12:52:47.017170 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-qm425/must-gather-6gxtm" Dec 03 12:52:47 crc kubenswrapper[4849]: I1203 12:52:47.379674 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-qm425/must-gather-6gxtm"] Dec 03 12:52:47 crc kubenswrapper[4849]: I1203 12:52:47.383246 4849 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 12:52:48 crc kubenswrapper[4849]: I1203 12:52:48.322438 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-qm425/must-gather-6gxtm" event={"ID":"295678a6-02fd-49e6-aaed-5d31b0bdfe35","Type":"ContainerStarted","Data":"e4b2ee62560016be41de7c996e2699bd4b0b9c7012e563f2b9053211c58bd35e"} Dec 03 12:52:49 crc kubenswrapper[4849]: E1203 12:52:49.857693 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:52:52 crc kubenswrapper[4849]: I1203 12:52:52.365039 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-qm425/must-gather-6gxtm" event={"ID":"295678a6-02fd-49e6-aaed-5d31b0bdfe35","Type":"ContainerStarted","Data":"da26e67fed20f1f76e544d6ae92e088f1b736c75b270171dad3421ca99ad282a"} Dec 03 12:52:52 crc kubenswrapper[4849]: I1203 12:52:52.677145 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:52:52 crc kubenswrapper[4849]: I1203 12:52:52.677195 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:52:53 crc kubenswrapper[4849]: I1203 12:52:53.372415 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-qm425/must-gather-6gxtm" event={"ID":"295678a6-02fd-49e6-aaed-5d31b0bdfe35","Type":"ContainerStarted","Data":"6ec003ff90aef1589e0d3fa34efc5a5035fc7cc3e10a5bd9ad2053855bc89378"} Dec 03 12:52:53 crc kubenswrapper[4849]: I1203 12:52:53.385462 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-qm425/must-gather-6gxtm" podStartSLOduration=2.586932199 podStartE2EDuration="7.385449871s" podCreationTimestamp="2025-12-03 12:52:46 +0000 UTC" firstStartedPulling="2025-12-03 12:52:47.383034313 +0000 UTC m=+1913.844882096" lastFinishedPulling="2025-12-03 
12:52:52.181551984 +0000 UTC m=+1918.643399768" observedRunningTime="2025-12-03 12:52:53.38519498 +0000 UTC m=+1919.847042764" watchObservedRunningTime="2025-12-03 12:52:53.385449871 +0000 UTC m=+1919.847297653" Dec 03 12:53:20 crc kubenswrapper[4849]: I1203 12:53:20.121590 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-m4cms_19cb4ba0-d936-4448-b004-402ec12d9bdd/control-plane-machine-set-operator/0.log" Dec 03 12:53:20 crc kubenswrapper[4849]: I1203 12:53:20.214831 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-trkjr_cef45b47-894e-4bfe-82ff-352085dbf93e/kube-rbac-proxy/0.log" Dec 03 12:53:20 crc kubenswrapper[4849]: I1203 12:53:20.271803 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-trkjr_cef45b47-894e-4bfe-82ff-352085dbf93e/machine-api-operator/0.log" Dec 03 12:53:22 crc kubenswrapper[4849]: I1203 12:53:22.677177 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:53:22 crc kubenswrapper[4849]: I1203 12:53:22.677455 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:53:22 crc kubenswrapper[4849]: I1203 12:53:22.677493 4849 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:53:22 crc kubenswrapper[4849]: I1203 12:53:22.677885 4849 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"556d91c3e9f53fc71ca08795a69b85c762746f401d63304c8c04d60822b4d151"} pod="openshift-machine-config-operator/machine-config-daemon-hszbg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:53:22 crc kubenswrapper[4849]: I1203 12:53:22.677927 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" containerID="cri-o://556d91c3e9f53fc71ca08795a69b85c762746f401d63304c8c04d60822b4d151" gracePeriod=600 Dec 03 12:53:23 crc kubenswrapper[4849]: I1203 12:53:23.559063 4849 generic.go:334] "Generic (PLEG): container finished" podID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerID="556d91c3e9f53fc71ca08795a69b85c762746f401d63304c8c04d60822b4d151" exitCode=0 Dec 03 12:53:23 crc kubenswrapper[4849]: I1203 12:53:23.559091 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerDied","Data":"556d91c3e9f53fc71ca08795a69b85c762746f401d63304c8c04d60822b4d151"} Dec 03 12:53:23 crc kubenswrapper[4849]: I1203 12:53:23.559578 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" 
event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerStarted","Data":"13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0"} Dec 03 12:53:23 crc kubenswrapper[4849]: I1203 12:53:23.559621 4849 scope.go:117] "RemoveContainer" containerID="f879274dc2112a3c48b27c6464d305ef97538aab0e6009c23a19383190323a5a" Dec 03 12:53:28 crc kubenswrapper[4849]: I1203 12:53:28.170002 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-d8cfb_3f221c87-0071-43b0-986e-425c3d54a75a/cert-manager-controller/0.log" Dec 03 12:53:28 crc kubenswrapper[4849]: I1203 12:53:28.288913 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-26qw2_a6a7a713-4a04-49fc-98b2-9c59610fa61b/cert-manager-cainjector/0.log" Dec 03 12:53:28 crc kubenswrapper[4849]: I1203 12:53:28.333165 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-mg8hm_236de675-5b13-478e-95d2-0f6da1047034/cert-manager-webhook/0.log" Dec 03 12:53:35 crc kubenswrapper[4849]: I1203 12:53:35.903314 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-l6hxr_2b81e93a-513f-45e6-b647-e4767283c8e5/nmstate-console-plugin/0.log" Dec 03 12:53:35 crc kubenswrapper[4849]: I1203 12:53:35.963564 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-r2xvh_7ddd8581-dade-41ec-8f10-6fa2ea56f767/nmstate-handler/0.log" Dec 03 12:53:36 crc kubenswrapper[4849]: I1203 12:53:36.032487 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-j2x7n_1aa532e3-4844-4ff4-a359-8414e22efb83/kube-rbac-proxy/0.log" Dec 03 12:53:36 crc kubenswrapper[4849]: I1203 12:53:36.059252 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-j2x7n_1aa532e3-4844-4ff4-a359-8414e22efb83/nmstate-metrics/0.log" Dec 03 12:53:36 crc kubenswrapper[4849]: I1203 12:53:36.157129 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-5v46h_3f89cce2-ef29-4fce-9144-cffaa419c936/nmstate-operator/0.log" Dec 03 12:53:36 crc kubenswrapper[4849]: I1203 12:53:36.192442 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-9pl9w_b34bd184-2a40-4689-817b-0fff0d519a11/nmstate-webhook/0.log" Dec 03 12:53:44 crc kubenswrapper[4849]: I1203 12:53:44.511695 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-866f574876-c6js2_b9050d43-e279-4812-89c0-7a9cce7f5f12/kube-rbac-proxy/0.log" Dec 03 12:53:44 crc kubenswrapper[4849]: I1203 12:53:44.536148 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-866f574876-c6js2_b9050d43-e279-4812-89c0-7a9cce7f5f12/manager/0.log" Dec 03 12:53:53 crc kubenswrapper[4849]: I1203 12:53:53.529723 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_cluster-logging-operator-ff9846bd-6f9jr_9dbb9334-5825-448c-9d4a-9d4b890e9dea/cluster-logging-operator/0.log" Dec 03 12:53:53 crc kubenswrapper[4849]: I1203 12:53:53.607684 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_collector-86rhs_c71bb35c-e32a-4a28-98d4-8ba714fcd547/collector/0.log" Dec 03 12:53:53 crc kubenswrapper[4849]: I1203 12:53:53.711499 4849 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-compactor-0_59f2429e-ddd5-463a-88c2-35bb8c8e2faf/loki-compactor/0.log" Dec 03 12:53:53 crc kubenswrapper[4849]: I1203 12:53:53.725336 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-distributor-76cc67bf56-skzmr_08d80fb5-b663-48cc-9ec7-0c4fbe92d7f9/loki-distributor/0.log" Dec 03 12:53:53 crc kubenswrapper[4849]: I1203 12:53:53.818778 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-9f98ffcc5-5nz6b_91698613-76b4-41b6-bd44-3197f5e6f5f8/gateway/0.log" Dec 03 12:53:53 crc kubenswrapper[4849]: I1203 12:53:53.850543 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-9f98ffcc5-5nz6b_91698613-76b4-41b6-bd44-3197f5e6f5f8/opa/0.log" Dec 03 12:53:53 crc kubenswrapper[4849]: I1203 12:53:53.934463 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-9f98ffcc5-9lgrf_f625b073-28c5-4b22-8a1e-2e43237fc19b/gateway/0.log" Dec 03 12:53:53 crc kubenswrapper[4849]: I1203 12:53:53.942266 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-gateway-9f98ffcc5-9lgrf_f625b073-28c5-4b22-8a1e-2e43237fc19b/opa/0.log" Dec 03 12:53:54 crc kubenswrapper[4849]: I1203 12:53:54.035142 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-index-gateway-0_bd46349d-af06-4123-a47e-634e322c840b/loki-index-gateway/0.log" Dec 03 12:53:54 crc kubenswrapper[4849]: I1203 12:53:54.068004 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-ingester-0_39d1c4cc-1f24-4c05-b7af-87cba182e3e6/loki-ingester/0.log" Dec 03 12:53:54 crc kubenswrapper[4849]: I1203 12:53:54.164746 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-querier-5895d59bb8-cn9h6_77037998-c847-4702-a80d-2c295922cb04/loki-querier/0.log" Dec 03 12:53:54 crc kubenswrapper[4849]: I1203 12:53:54.229855 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-logging_logging-loki-query-frontend-84558f7c9f-p8jpz_1e2e8068-2365-43fe-8b38-ad21b6007471/loki-query-frontend/0.log" Dec 03 12:54:03 crc kubenswrapper[4849]: I1203 12:54:03.541408 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-fw2c2_069cef18-23c1-4c6f-b0e0-7fcab99a1d52/kube-rbac-proxy/0.log" Dec 03 12:54:03 crc kubenswrapper[4849]: I1203 12:54:03.642350 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-fw2c2_069cef18-23c1-4c6f-b0e0-7fcab99a1d52/controller/0.log" Dec 03 12:54:03 crc kubenswrapper[4849]: I1203 12:54:03.704555 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/cp-frr-files/0.log" Dec 03 12:54:03 crc kubenswrapper[4849]: I1203 12:54:03.834577 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/cp-reloader/0.log" Dec 03 12:54:03 crc kubenswrapper[4849]: I1203 12:54:03.837883 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/cp-frr-files/0.log" Dec 03 12:54:03 crc kubenswrapper[4849]: I1203 12:54:03.843468 4849 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/cp-metrics/0.log" Dec 03 12:54:03 crc kubenswrapper[4849]: I1203 12:54:03.860313 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/cp-reloader/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.011744 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/cp-metrics/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.025474 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/cp-reloader/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.029846 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/cp-metrics/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.030285 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/cp-frr-files/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.140085 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/cp-frr-files/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.140752 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/cp-reloader/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.153759 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/cp-metrics/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.172278 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/controller/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.267363 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/frr-metrics/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.291708 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/kube-rbac-proxy-frr/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.302087 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/kube-rbac-proxy/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.310203 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/frr/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.392981 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-kgj8v_c6d0dda9-8382-472b-903b-8664e57f3fc5/reloader/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.451891 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-6w6ng_b6b66905-7716-42b4-94a6-4de28876a7d6/frr-k8s-webhook-server/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.580845 4849 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_metallb-operator-webhook-server-77f6f7b67b-d7sth_008d5799-334c-4ee1-af23-c7d44925cd90/webhook-server/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.590206 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7fcb955f57-kpvw4_9b7b5461-2704-44b1-abce-14ddfebec290/manager/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.679491 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-dchnp_84e9fe4d-6c68-464b-83f4-ba0889ae9b73/kube-rbac-proxy/0.log" Dec 03 12:54:04 crc kubenswrapper[4849]: I1203 12:54:04.817005 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-dchnp_84e9fe4d-6c68-464b-83f4-ba0889ae9b73/speaker/0.log" Dec 03 12:54:12 crc kubenswrapper[4849]: I1203 12:54:12.927727 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx_931dd621-537b-4ff8-96c6-f78015b8c33f/util/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.079482 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx_931dd621-537b-4ff8-96c6-f78015b8c33f/pull/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.107032 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx_931dd621-537b-4ff8-96c6-f78015b8c33f/util/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.108163 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx_931dd621-537b-4ff8-96c6-f78015b8c33f/pull/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.238783 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx_931dd621-537b-4ff8-96c6-f78015b8c33f/pull/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.252609 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx_931dd621-537b-4ff8-96c6-f78015b8c33f/util/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.265608 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_4529ed37fc81381df2b45ea09e6f1b4af8d1558d603912431befd8aeb88qhkx_931dd621-537b-4ff8-96c6-f78015b8c33f/extract/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.357989 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t_03105ea5-dd64-4df6-9158-d45a6686afed/util/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.496968 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t_03105ea5-dd64-4df6-9158-d45a6686afed/pull/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.503176 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t_03105ea5-dd64-4df6-9158-d45a6686afed/pull/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.513448 4849 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t_03105ea5-dd64-4df6-9158-d45a6686afed/util/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.713882 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t_03105ea5-dd64-4df6-9158-d45a6686afed/extract/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.730523 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t_03105ea5-dd64-4df6-9158-d45a6686afed/pull/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.743600 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjm94t_03105ea5-dd64-4df6-9158-d45a6686afed/util/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.845112 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh_09bda4af-bca8-4f80-9d40-ab5f3e5459ce/util/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.968201 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh_09bda4af-bca8-4f80-9d40-ab5f3e5459ce/pull/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.977353 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh_09bda4af-bca8-4f80-9d40-ab5f3e5459ce/util/0.log" Dec 03 12:54:13 crc kubenswrapper[4849]: I1203 12:54:13.983412 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh_09bda4af-bca8-4f80-9d40-ab5f3e5459ce/pull/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.107665 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh_09bda4af-bca8-4f80-9d40-ab5f3e5459ce/util/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.126345 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh_09bda4af-bca8-4f80-9d40-ab5f3e5459ce/pull/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.135198 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210wxszh_09bda4af-bca8-4f80-9d40-ab5f3e5459ce/extract/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.238750 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x_3bf60e1a-b6a1-430a-bf47-776b86bf7c90/util/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.373575 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x_3bf60e1a-b6a1-430a-bf47-776b86bf7c90/pull/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.382110 4849 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x_3bf60e1a-b6a1-430a-bf47-776b86bf7c90/util/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.396238 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x_3bf60e1a-b6a1-430a-bf47-776b86bf7c90/pull/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.501456 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x_3bf60e1a-b6a1-430a-bf47-776b86bf7c90/pull/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.521999 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x_3bf60e1a-b6a1-430a-bf47-776b86bf7c90/util/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.535393 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a8a03f72555e3294619fd3c0a789fa82d1f6921a8cf9935ed9b211463f4gd8x_3bf60e1a-b6a1-430a-bf47-776b86bf7c90/extract/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.632029 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb_1f0eee16-27ac-4f1d-a968-5424af3ee3d2/util/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.755570 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb_1f0eee16-27ac-4f1d-a968-5424af3ee3d2/pull/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.755609 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb_1f0eee16-27ac-4f1d-a968-5424af3ee3d2/pull/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.769780 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb_1f0eee16-27ac-4f1d-a968-5424af3ee3d2/util/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.900556 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb_1f0eee16-27ac-4f1d-a968-5424af3ee3d2/util/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.930200 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb_1f0eee16-27ac-4f1d-a968-5424af3ee3d2/extract/0.log" Dec 03 12:54:14 crc kubenswrapper[4849]: I1203 12:54:14.933844 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g9hvb_1f0eee16-27ac-4f1d-a968-5424af3ee3d2/pull/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.031915 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8fhxw_a98469a9-b6c8-4fa3-a639-89059d0a4de8/extract-utilities/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.169680 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8fhxw_a98469a9-b6c8-4fa3-a639-89059d0a4de8/extract-content/0.log" Dec 03 12:54:15 crc 
kubenswrapper[4849]: I1203 12:54:15.171467 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8fhxw_a98469a9-b6c8-4fa3-a639-89059d0a4de8/extract-utilities/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.172041 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8fhxw_a98469a9-b6c8-4fa3-a639-89059d0a4de8/extract-content/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.296510 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8fhxw_a98469a9-b6c8-4fa3-a639-89059d0a4de8/extract-content/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.297901 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8fhxw_a98469a9-b6c8-4fa3-a639-89059d0a4de8/extract-utilities/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.476342 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dr9hn_8f23f72f-8e85-4f2c-91ee-b11942536d44/extract-utilities/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.572476 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8fhxw_a98469a9-b6c8-4fa3-a639-89059d0a4de8/registry-server/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.597241 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dr9hn_8f23f72f-8e85-4f2c-91ee-b11942536d44/extract-utilities/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.652953 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dr9hn_8f23f72f-8e85-4f2c-91ee-b11942536d44/extract-content/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.668866 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dr9hn_8f23f72f-8e85-4f2c-91ee-b11942536d44/extract-content/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.788965 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dr9hn_8f23f72f-8e85-4f2c-91ee-b11942536d44/extract-content/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.793120 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dr9hn_8f23f72f-8e85-4f2c-91ee-b11942536d44/extract-utilities/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.852022 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-bk499_2079bd80-c5b1-42e4-b5ed-a8c7ba357882/marketplace-operator/0.log" Dec 03 12:54:15 crc kubenswrapper[4849]: I1203 12:54:15.973594 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-g7tp4_d412054f-4ed5-4275-bfed-e6f2160d41ee/extract-utilities/0.log" Dec 03 12:54:16 crc kubenswrapper[4849]: I1203 12:54:16.101214 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dr9hn_8f23f72f-8e85-4f2c-91ee-b11942536d44/registry-server/0.log" Dec 03 12:54:16 crc kubenswrapper[4849]: I1203 12:54:16.128367 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-g7tp4_d412054f-4ed5-4275-bfed-e6f2160d41ee/extract-utilities/0.log" Dec 03 12:54:16 crc 
kubenswrapper[4849]: I1203 12:54:16.132998 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-g7tp4_d412054f-4ed5-4275-bfed-e6f2160d41ee/extract-content/0.log" Dec 03 12:54:16 crc kubenswrapper[4849]: I1203 12:54:16.148309 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-g7tp4_d412054f-4ed5-4275-bfed-e6f2160d41ee/extract-content/0.log" Dec 03 12:54:16 crc kubenswrapper[4849]: I1203 12:54:16.251116 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-g7tp4_d412054f-4ed5-4275-bfed-e6f2160d41ee/extract-content/0.log" Dec 03 12:54:16 crc kubenswrapper[4849]: I1203 12:54:16.255169 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-g7tp4_d412054f-4ed5-4275-bfed-e6f2160d41ee/extract-utilities/0.log" Dec 03 12:54:16 crc kubenswrapper[4849]: I1203 12:54:16.306606 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6sw5z_d9b0e814-298c-4693-874f-6687f99b49ef/extract-utilities/0.log" Dec 03 12:54:16 crc kubenswrapper[4849]: I1203 12:54:16.334071 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-g7tp4_d412054f-4ed5-4275-bfed-e6f2160d41ee/registry-server/0.log" Dec 03 12:54:16 crc kubenswrapper[4849]: I1203 12:54:16.535034 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6sw5z_d9b0e814-298c-4693-874f-6687f99b49ef/extract-content/0.log" Dec 03 12:54:16 crc kubenswrapper[4849]: I1203 12:54:16.537522 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6sw5z_d9b0e814-298c-4693-874f-6687f99b49ef/extract-content/0.log" Dec 03 12:54:16 crc kubenswrapper[4849]: I1203 12:54:16.545988 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6sw5z_d9b0e814-298c-4693-874f-6687f99b49ef/extract-utilities/0.log" Dec 03 12:54:16 crc kubenswrapper[4849]: I1203 12:54:16.647616 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6sw5z_d9b0e814-298c-4693-874f-6687f99b49ef/extract-content/0.log" Dec 03 12:54:16 crc kubenswrapper[4849]: I1203 12:54:16.655175 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6sw5z_d9b0e814-298c-4693-874f-6687f99b49ef/extract-utilities/0.log" Dec 03 12:54:16 crc kubenswrapper[4849]: I1203 12:54:16.952040 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6sw5z_d9b0e814-298c-4693-874f-6687f99b49ef/registry-server/0.log" Dec 03 12:54:24 crc kubenswrapper[4849]: I1203 12:54:24.675157 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-7zxv7_4289cd35-0b12-4095-9c25-b071d4351d32/prometheus-operator/0.log" Dec 03 12:54:24 crc kubenswrapper[4849]: I1203 12:54:24.724276 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-84db7f5694-67hvz_0af871d7-9f6d-49ba-97ad-d0d36cceed52/prometheus-operator-admission-webhook/0.log" Dec 03 12:54:24 crc kubenswrapper[4849]: I1203 12:54:24.796385 4849 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-84db7f5694-gnt8p_ffaeeb44-3be6-4407-9095-339c36ae6c58/prometheus-operator-admission-webhook/0.log" Dec 03 12:54:24 crc kubenswrapper[4849]: I1203 12:54:24.876372 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-4p9g9_64bb0224-aa3f-4b8a-854e-251422819440/operator/0.log" Dec 03 12:54:24 crc kubenswrapper[4849]: I1203 12:54:24.926889 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-xtwpc_49c7f06b-878f-4814-8196-b4a91e2fbca7/perses-operator/0.log" Dec 03 12:54:33 crc kubenswrapper[4849]: I1203 12:54:33.180915 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-866f574876-c6js2_b9050d43-e279-4812-89c0-7a9cce7f5f12/kube-rbac-proxy/0.log" Dec 03 12:54:33 crc kubenswrapper[4849]: I1203 12:54:33.208114 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-866f574876-c6js2_b9050d43-e279-4812-89c0-7a9cce7f5f12/manager/0.log" Dec 03 12:55:01 crc kubenswrapper[4849]: E1203 12:55:01.861539 4849 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:55:01 crc kubenswrapper[4849]: E1203 12:55:01.861924 4849 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" image="38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333" Dec 03 12:55:01 crc kubenswrapper[4849]: E1203 12:55:01.862112 4849 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vdp7z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe 
-addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-qjkcg_openstack-operators(d93bf9a7-2e41-4abf-9ec8-04480010f205): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \"http://38.102.83.155:5001/v2/\": dial tcp 38.102.83.155:5001: i/o timeout" logger="UnhandledError" Dec 03 12:55:01 crc kubenswrapper[4849]: E1203 12:55:01.863259 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333: pinging container registry 38.102.83.155:5001: Get \\\"http://38.102.83.155:5001/v2/\\\": dial tcp 38.102.83.155:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:55:15 crc kubenswrapper[4849]: I1203 12:55:15.180704 4849 generic.go:334] "Generic (PLEG): container finished" podID="295678a6-02fd-49e6-aaed-5d31b0bdfe35" containerID="da26e67fed20f1f76e544d6ae92e088f1b736c75b270171dad3421ca99ad282a" exitCode=0 Dec 03 12:55:15 crc kubenswrapper[4849]: I1203 12:55:15.180773 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-qm425/must-gather-6gxtm" event={"ID":"295678a6-02fd-49e6-aaed-5d31b0bdfe35","Type":"ContainerDied","Data":"da26e67fed20f1f76e544d6ae92e088f1b736c75b270171dad3421ca99ad282a"} Dec 03 12:55:15 crc kubenswrapper[4849]: I1203 12:55:15.181555 4849 scope.go:117] "RemoveContainer" containerID="da26e67fed20f1f76e544d6ae92e088f1b736c75b270171dad3421ca99ad282a" Dec 03 12:55:15 crc kubenswrapper[4849]: I1203 12:55:15.348335 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-qm425_must-gather-6gxtm_295678a6-02fd-49e6-aaed-5d31b0bdfe35/gather/0.log" Dec 03 12:55:16 crc kubenswrapper[4849]: E1203 12:55:16.861282 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:55:22 crc 
kubenswrapper[4849]: I1203 12:55:22.002270 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-qm425/must-gather-6gxtm"] Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.002613 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-qm425/must-gather-6gxtm" podUID="295678a6-02fd-49e6-aaed-5d31b0bdfe35" containerName="copy" containerID="cri-o://6ec003ff90aef1589e0d3fa34efc5a5035fc7cc3e10a5bd9ad2053855bc89378" gracePeriod=2 Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.008077 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-qm425/must-gather-6gxtm"] Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.227273 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-qm425_must-gather-6gxtm_295678a6-02fd-49e6-aaed-5d31b0bdfe35/copy/0.log" Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.230070 4849 generic.go:334] "Generic (PLEG): container finished" podID="295678a6-02fd-49e6-aaed-5d31b0bdfe35" containerID="6ec003ff90aef1589e0d3fa34efc5a5035fc7cc3e10a5bd9ad2053855bc89378" exitCode=143 Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.326466 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-qm425_must-gather-6gxtm_295678a6-02fd-49e6-aaed-5d31b0bdfe35/copy/0.log" Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.326906 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-qm425/must-gather-6gxtm" Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.343017 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzjs6\" (UniqueName: \"kubernetes.io/projected/295678a6-02fd-49e6-aaed-5d31b0bdfe35-kube-api-access-fzjs6\") pod \"295678a6-02fd-49e6-aaed-5d31b0bdfe35\" (UID: \"295678a6-02fd-49e6-aaed-5d31b0bdfe35\") " Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.343078 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/295678a6-02fd-49e6-aaed-5d31b0bdfe35-must-gather-output\") pod \"295678a6-02fd-49e6-aaed-5d31b0bdfe35\" (UID: \"295678a6-02fd-49e6-aaed-5d31b0bdfe35\") " Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.352440 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/295678a6-02fd-49e6-aaed-5d31b0bdfe35-kube-api-access-fzjs6" (OuterVolumeSpecName: "kube-api-access-fzjs6") pod "295678a6-02fd-49e6-aaed-5d31b0bdfe35" (UID: "295678a6-02fd-49e6-aaed-5d31b0bdfe35"). InnerVolumeSpecName "kube-api-access-fzjs6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.404249 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/295678a6-02fd-49e6-aaed-5d31b0bdfe35-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "295678a6-02fd-49e6-aaed-5d31b0bdfe35" (UID: "295678a6-02fd-49e6-aaed-5d31b0bdfe35"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.444590 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzjs6\" (UniqueName: \"kubernetes.io/projected/295678a6-02fd-49e6-aaed-5d31b0bdfe35-kube-api-access-fzjs6\") on node \"crc\" DevicePath \"\"" Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.444620 4849 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/295678a6-02fd-49e6-aaed-5d31b0bdfe35-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.677286 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:55:22 crc kubenswrapper[4849]: I1203 12:55:22.677494 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:55:23 crc kubenswrapper[4849]: I1203 12:55:23.236451 4849 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-qm425_must-gather-6gxtm_295678a6-02fd-49e6-aaed-5d31b0bdfe35/copy/0.log" Dec 03 12:55:23 crc kubenswrapper[4849]: I1203 12:55:23.237266 4849 scope.go:117] "RemoveContainer" containerID="6ec003ff90aef1589e0d3fa34efc5a5035fc7cc3e10a5bd9ad2053855bc89378" Dec 03 12:55:23 crc kubenswrapper[4849]: I1203 12:55:23.237296 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-qm425/must-gather-6gxtm" Dec 03 12:55:23 crc kubenswrapper[4849]: I1203 12:55:23.250832 4849 scope.go:117] "RemoveContainer" containerID="da26e67fed20f1f76e544d6ae92e088f1b736c75b270171dad3421ca99ad282a" Dec 03 12:55:23 crc kubenswrapper[4849]: I1203 12:55:23.862668 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="295678a6-02fd-49e6-aaed-5d31b0bdfe35" path="/var/lib/kubelet/pods/295678a6-02fd-49e6-aaed-5d31b0bdfe35/volumes" Dec 03 12:55:28 crc kubenswrapper[4849]: E1203 12:55:28.858243 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.321719 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vmvqh"] Dec 03 12:55:42 crc kubenswrapper[4849]: E1203 12:55:42.322306 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="295678a6-02fd-49e6-aaed-5d31b0bdfe35" containerName="copy" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.322319 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="295678a6-02fd-49e6-aaed-5d31b0bdfe35" containerName="copy" Dec 03 12:55:42 crc kubenswrapper[4849]: E1203 12:55:42.322337 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="295678a6-02fd-49e6-aaed-5d31b0bdfe35" containerName="gather" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.322342 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="295678a6-02fd-49e6-aaed-5d31b0bdfe35" containerName="gather" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.322486 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="295678a6-02fd-49e6-aaed-5d31b0bdfe35" containerName="gather" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.322508 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="295678a6-02fd-49e6-aaed-5d31b0bdfe35" containerName="copy" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.323452 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.333667 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vmvqh"] Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.414240 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/103d9e4a-9b77-44f1-8e2d-202475857239-catalog-content\") pod \"redhat-operators-vmvqh\" (UID: \"103d9e4a-9b77-44f1-8e2d-202475857239\") " pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.414306 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfptv\" (UniqueName: \"kubernetes.io/projected/103d9e4a-9b77-44f1-8e2d-202475857239-kube-api-access-vfptv\") pod \"redhat-operators-vmvqh\" (UID: \"103d9e4a-9b77-44f1-8e2d-202475857239\") " pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.414814 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/103d9e4a-9b77-44f1-8e2d-202475857239-utilities\") pod \"redhat-operators-vmvqh\" (UID: \"103d9e4a-9b77-44f1-8e2d-202475857239\") " pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.516676 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/103d9e4a-9b77-44f1-8e2d-202475857239-utilities\") pod \"redhat-operators-vmvqh\" (UID: \"103d9e4a-9b77-44f1-8e2d-202475857239\") " pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.516719 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/103d9e4a-9b77-44f1-8e2d-202475857239-catalog-content\") pod \"redhat-operators-vmvqh\" (UID: \"103d9e4a-9b77-44f1-8e2d-202475857239\") " pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.516758 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfptv\" (UniqueName: \"kubernetes.io/projected/103d9e4a-9b77-44f1-8e2d-202475857239-kube-api-access-vfptv\") pod \"redhat-operators-vmvqh\" (UID: \"103d9e4a-9b77-44f1-8e2d-202475857239\") " pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.517386 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/103d9e4a-9b77-44f1-8e2d-202475857239-utilities\") pod \"redhat-operators-vmvqh\" (UID: \"103d9e4a-9b77-44f1-8e2d-202475857239\") " pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.517410 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/103d9e4a-9b77-44f1-8e2d-202475857239-catalog-content\") pod \"redhat-operators-vmvqh\" (UID: \"103d9e4a-9b77-44f1-8e2d-202475857239\") " pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.532689 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vfptv\" (UniqueName: \"kubernetes.io/projected/103d9e4a-9b77-44f1-8e2d-202475857239-kube-api-access-vfptv\") pod \"redhat-operators-vmvqh\" (UID: \"103d9e4a-9b77-44f1-8e2d-202475857239\") " pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:42 crc kubenswrapper[4849]: I1203 12:55:42.648948 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:42 crc kubenswrapper[4849]: E1203 12:55:42.858591 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:55:43 crc kubenswrapper[4849]: I1203 12:55:43.035988 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vmvqh"] Dec 03 12:55:43 crc kubenswrapper[4849]: I1203 12:55:43.360172 4849 generic.go:334] "Generic (PLEG): container finished" podID="103d9e4a-9b77-44f1-8e2d-202475857239" containerID="b58f898a73b59d3f20bbf30200fd1df52653311b632840b67bf7a1ec5297a0d9" exitCode=0 Dec 03 12:55:43 crc kubenswrapper[4849]: I1203 12:55:43.360211 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmvqh" event={"ID":"103d9e4a-9b77-44f1-8e2d-202475857239","Type":"ContainerDied","Data":"b58f898a73b59d3f20bbf30200fd1df52653311b632840b67bf7a1ec5297a0d9"} Dec 03 12:55:43 crc kubenswrapper[4849]: I1203 12:55:43.360388 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmvqh" event={"ID":"103d9e4a-9b77-44f1-8e2d-202475857239","Type":"ContainerStarted","Data":"53aedfc1cbc22d7ba3387cac6d91e0cc9380c75d4685c180c366ffbc1e9e4d5b"} Dec 03 12:55:44 crc kubenswrapper[4849]: I1203 12:55:44.367382 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmvqh" event={"ID":"103d9e4a-9b77-44f1-8e2d-202475857239","Type":"ContainerStarted","Data":"bbfe514faecd1b9d220cf913b1fa6aaf467d72dca2c7a4c2a53b0a51634ea43d"} Dec 03 12:55:45 crc kubenswrapper[4849]: I1203 12:55:45.374249 4849 generic.go:334] "Generic (PLEG): container finished" podID="103d9e4a-9b77-44f1-8e2d-202475857239" containerID="bbfe514faecd1b9d220cf913b1fa6aaf467d72dca2c7a4c2a53b0a51634ea43d" exitCode=0 Dec 03 12:55:45 crc kubenswrapper[4849]: I1203 12:55:45.374285 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmvqh" event={"ID":"103d9e4a-9b77-44f1-8e2d-202475857239","Type":"ContainerDied","Data":"bbfe514faecd1b9d220cf913b1fa6aaf467d72dca2c7a4c2a53b0a51634ea43d"} Dec 03 12:55:46 crc kubenswrapper[4849]: I1203 12:55:46.381900 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmvqh" event={"ID":"103d9e4a-9b77-44f1-8e2d-202475857239","Type":"ContainerStarted","Data":"c6afac25b248d3662f541a766519102c48d64083a244dc49affe819abeb9d90f"} Dec 03 12:55:46 crc kubenswrapper[4849]: I1203 12:55:46.403224 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vmvqh" podStartSLOduration=1.9237732159999998 podStartE2EDuration="4.403208446s" podCreationTimestamp="2025-12-03 12:55:42 +0000 UTC" firstStartedPulling="2025-12-03 12:55:43.361425823 +0000 UTC 
m=+2089.823273606" lastFinishedPulling="2025-12-03 12:55:45.840861053 +0000 UTC m=+2092.302708836" observedRunningTime="2025-12-03 12:55:46.40166799 +0000 UTC m=+2092.863515772" watchObservedRunningTime="2025-12-03 12:55:46.403208446 +0000 UTC m=+2092.865056228" Dec 03 12:55:52 crc kubenswrapper[4849]: I1203 12:55:52.649035 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:52 crc kubenswrapper[4849]: I1203 12:55:52.649257 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:52 crc kubenswrapper[4849]: I1203 12:55:52.677043 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:55:52 crc kubenswrapper[4849]: I1203 12:55:52.677085 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:55:52 crc kubenswrapper[4849]: I1203 12:55:52.679005 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:53 crc kubenswrapper[4849]: I1203 12:55:53.444910 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:53 crc kubenswrapper[4849]: I1203 12:55:53.477430 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vmvqh"] Dec 03 12:55:55 crc kubenswrapper[4849]: I1203 12:55:55.426203 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vmvqh" podUID="103d9e4a-9b77-44f1-8e2d-202475857239" containerName="registry-server" containerID="cri-o://c6afac25b248d3662f541a766519102c48d64083a244dc49affe819abeb9d90f" gracePeriod=2 Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.254346 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.317265 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/103d9e4a-9b77-44f1-8e2d-202475857239-catalog-content\") pod \"103d9e4a-9b77-44f1-8e2d-202475857239\" (UID: \"103d9e4a-9b77-44f1-8e2d-202475857239\") " Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.317318 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfptv\" (UniqueName: \"kubernetes.io/projected/103d9e4a-9b77-44f1-8e2d-202475857239-kube-api-access-vfptv\") pod \"103d9e4a-9b77-44f1-8e2d-202475857239\" (UID: \"103d9e4a-9b77-44f1-8e2d-202475857239\") " Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.317429 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/103d9e4a-9b77-44f1-8e2d-202475857239-utilities\") pod \"103d9e4a-9b77-44f1-8e2d-202475857239\" (UID: \"103d9e4a-9b77-44f1-8e2d-202475857239\") " Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.318176 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/103d9e4a-9b77-44f1-8e2d-202475857239-utilities" (OuterVolumeSpecName: "utilities") pod "103d9e4a-9b77-44f1-8e2d-202475857239" (UID: "103d9e4a-9b77-44f1-8e2d-202475857239"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.321527 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/103d9e4a-9b77-44f1-8e2d-202475857239-kube-api-access-vfptv" (OuterVolumeSpecName: "kube-api-access-vfptv") pod "103d9e4a-9b77-44f1-8e2d-202475857239" (UID: "103d9e4a-9b77-44f1-8e2d-202475857239"). InnerVolumeSpecName "kube-api-access-vfptv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.390027 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/103d9e4a-9b77-44f1-8e2d-202475857239-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "103d9e4a-9b77-44f1-8e2d-202475857239" (UID: "103d9e4a-9b77-44f1-8e2d-202475857239"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.419552 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/103d9e4a-9b77-44f1-8e2d-202475857239-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.419577 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfptv\" (UniqueName: \"kubernetes.io/projected/103d9e4a-9b77-44f1-8e2d-202475857239-kube-api-access-vfptv\") on node \"crc\" DevicePath \"\"" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.419588 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/103d9e4a-9b77-44f1-8e2d-202475857239-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.433855 4849 generic.go:334] "Generic (PLEG): container finished" podID="103d9e4a-9b77-44f1-8e2d-202475857239" containerID="c6afac25b248d3662f541a766519102c48d64083a244dc49affe819abeb9d90f" exitCode=0 Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.433885 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmvqh" event={"ID":"103d9e4a-9b77-44f1-8e2d-202475857239","Type":"ContainerDied","Data":"c6afac25b248d3662f541a766519102c48d64083a244dc49affe819abeb9d90f"} Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.433918 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vmvqh" event={"ID":"103d9e4a-9b77-44f1-8e2d-202475857239","Type":"ContainerDied","Data":"53aedfc1cbc22d7ba3387cac6d91e0cc9380c75d4685c180c366ffbc1e9e4d5b"} Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.433919 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vmvqh" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.433942 4849 scope.go:117] "RemoveContainer" containerID="c6afac25b248d3662f541a766519102c48d64083a244dc49affe819abeb9d90f" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.446235 4849 scope.go:117] "RemoveContainer" containerID="bbfe514faecd1b9d220cf913b1fa6aaf467d72dca2c7a4c2a53b0a51634ea43d" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.455703 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vmvqh"] Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.461069 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vmvqh"] Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.478154 4849 scope.go:117] "RemoveContainer" containerID="b58f898a73b59d3f20bbf30200fd1df52653311b632840b67bf7a1ec5297a0d9" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.489042 4849 scope.go:117] "RemoveContainer" containerID="c6afac25b248d3662f541a766519102c48d64083a244dc49affe819abeb9d90f" Dec 03 12:55:56 crc kubenswrapper[4849]: E1203 12:55:56.489284 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6afac25b248d3662f541a766519102c48d64083a244dc49affe819abeb9d90f\": container with ID starting with c6afac25b248d3662f541a766519102c48d64083a244dc49affe819abeb9d90f not found: ID does not exist" containerID="c6afac25b248d3662f541a766519102c48d64083a244dc49affe819abeb9d90f" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.489312 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6afac25b248d3662f541a766519102c48d64083a244dc49affe819abeb9d90f"} err="failed to get container status \"c6afac25b248d3662f541a766519102c48d64083a244dc49affe819abeb9d90f\": rpc error: code = NotFound desc = could not find container \"c6afac25b248d3662f541a766519102c48d64083a244dc49affe819abeb9d90f\": container with ID starting with c6afac25b248d3662f541a766519102c48d64083a244dc49affe819abeb9d90f not found: ID does not exist" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.489331 4849 scope.go:117] "RemoveContainer" containerID="bbfe514faecd1b9d220cf913b1fa6aaf467d72dca2c7a4c2a53b0a51634ea43d" Dec 03 12:55:56 crc kubenswrapper[4849]: E1203 12:55:56.489574 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbfe514faecd1b9d220cf913b1fa6aaf467d72dca2c7a4c2a53b0a51634ea43d\": container with ID starting with bbfe514faecd1b9d220cf913b1fa6aaf467d72dca2c7a4c2a53b0a51634ea43d not found: ID does not exist" containerID="bbfe514faecd1b9d220cf913b1fa6aaf467d72dca2c7a4c2a53b0a51634ea43d" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.489595 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbfe514faecd1b9d220cf913b1fa6aaf467d72dca2c7a4c2a53b0a51634ea43d"} err="failed to get container status \"bbfe514faecd1b9d220cf913b1fa6aaf467d72dca2c7a4c2a53b0a51634ea43d\": rpc error: code = NotFound desc = could not find container \"bbfe514faecd1b9d220cf913b1fa6aaf467d72dca2c7a4c2a53b0a51634ea43d\": container with ID starting with bbfe514faecd1b9d220cf913b1fa6aaf467d72dca2c7a4c2a53b0a51634ea43d not found: ID does not exist" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.489607 4849 scope.go:117] "RemoveContainer" 
containerID="b58f898a73b59d3f20bbf30200fd1df52653311b632840b67bf7a1ec5297a0d9" Dec 03 12:55:56 crc kubenswrapper[4849]: E1203 12:55:56.489803 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b58f898a73b59d3f20bbf30200fd1df52653311b632840b67bf7a1ec5297a0d9\": container with ID starting with b58f898a73b59d3f20bbf30200fd1df52653311b632840b67bf7a1ec5297a0d9 not found: ID does not exist" containerID="b58f898a73b59d3f20bbf30200fd1df52653311b632840b67bf7a1ec5297a0d9" Dec 03 12:55:56 crc kubenswrapper[4849]: I1203 12:55:56.489822 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b58f898a73b59d3f20bbf30200fd1df52653311b632840b67bf7a1ec5297a0d9"} err="failed to get container status \"b58f898a73b59d3f20bbf30200fd1df52653311b632840b67bf7a1ec5297a0d9\": rpc error: code = NotFound desc = could not find container \"b58f898a73b59d3f20bbf30200fd1df52653311b632840b67bf7a1ec5297a0d9\": container with ID starting with b58f898a73b59d3f20bbf30200fd1df52653311b632840b67bf7a1ec5297a0d9 not found: ID does not exist" Dec 03 12:55:57 crc kubenswrapper[4849]: E1203 12:55:57.857784 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:55:57 crc kubenswrapper[4849]: I1203 12:55:57.863656 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="103d9e4a-9b77-44f1-8e2d-202475857239" path="/var/lib/kubelet/pods/103d9e4a-9b77-44f1-8e2d-202475857239/volumes" Dec 03 12:56:08 crc kubenswrapper[4849]: E1203 12:56:08.858011 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:56:20 crc kubenswrapper[4849]: E1203 12:56:20.858049 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:56:22 crc kubenswrapper[4849]: I1203 12:56:22.677734 4849 patch_prober.go:28] interesting pod/machine-config-daemon-hszbg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 12:56:22 crc kubenswrapper[4849]: I1203 12:56:22.677781 4849 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 12:56:22 crc kubenswrapper[4849]: I1203 12:56:22.677824 4849 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" Dec 03 12:56:22 crc kubenswrapper[4849]: I1203 12:56:22.678239 4849 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0"} pod="openshift-machine-config-operator/machine-config-daemon-hszbg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 12:56:22 crc kubenswrapper[4849]: I1203 12:56:22.678284 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerName="machine-config-daemon" containerID="cri-o://13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" gracePeriod=600 Dec 03 12:56:22 crc kubenswrapper[4849]: E1203 12:56:22.791227 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:56:23 crc kubenswrapper[4849]: I1203 12:56:23.576094 4849 generic.go:334] "Generic (PLEG): container finished" podID="d80ee321-2880-456a-9f19-c46cb0ab8128" containerID="13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" exitCode=0 Dec 03 12:56:23 crc kubenswrapper[4849]: I1203 12:56:23.576129 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" event={"ID":"d80ee321-2880-456a-9f19-c46cb0ab8128","Type":"ContainerDied","Data":"13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0"} Dec 03 12:56:23 crc kubenswrapper[4849]: I1203 12:56:23.576156 4849 scope.go:117] "RemoveContainer" containerID="556d91c3e9f53fc71ca08795a69b85c762746f401d63304c8c04d60822b4d151" Dec 03 12:56:23 crc kubenswrapper[4849]: I1203 12:56:23.576694 4849 scope.go:117] "RemoveContainer" containerID="13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" Dec 03 12:56:23 crc kubenswrapper[4849]: E1203 12:56:23.576986 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:56:31 crc kubenswrapper[4849]: E1203 12:56:31.858135 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:56:35 crc kubenswrapper[4849]: I1203 12:56:35.856126 4849 scope.go:117] "RemoveContainer" containerID="13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" Dec 03 12:56:35 crc 
kubenswrapper[4849]: E1203 12:56:35.856514 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:56:43 crc kubenswrapper[4849]: E1203 12:56:43.861685 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:56:46 crc kubenswrapper[4849]: I1203 12:56:46.856611 4849 scope.go:117] "RemoveContainer" containerID="13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" Dec 03 12:56:46 crc kubenswrapper[4849]: E1203 12:56:46.857076 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:56:54 crc kubenswrapper[4849]: E1203 12:56:54.857985 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:57:01 crc kubenswrapper[4849]: I1203 12:57:01.856814 4849 scope.go:117] "RemoveContainer" containerID="13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" Dec 03 12:57:01 crc kubenswrapper[4849]: E1203 12:57:01.857312 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:57:08 crc kubenswrapper[4849]: E1203 12:57:08.858082 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:57:15 crc kubenswrapper[4849]: I1203 12:57:15.857095 4849 scope.go:117] "RemoveContainer" containerID="13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" Dec 03 12:57:15 crc kubenswrapper[4849]: E1203 12:57:15.857556 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:57:20 crc kubenswrapper[4849]: E1203 12:57:20.858014 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:57:21 crc kubenswrapper[4849]: I1203 12:57:21.870437 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c9cpw"] Dec 03 12:57:21 crc kubenswrapper[4849]: E1203 12:57:21.870704 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="103d9e4a-9b77-44f1-8e2d-202475857239" containerName="extract-utilities" Dec 03 12:57:21 crc kubenswrapper[4849]: I1203 12:57:21.870718 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="103d9e4a-9b77-44f1-8e2d-202475857239" containerName="extract-utilities" Dec 03 12:57:21 crc kubenswrapper[4849]: E1203 12:57:21.870740 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="103d9e4a-9b77-44f1-8e2d-202475857239" containerName="registry-server" Dec 03 12:57:21 crc kubenswrapper[4849]: I1203 12:57:21.870745 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="103d9e4a-9b77-44f1-8e2d-202475857239" containerName="registry-server" Dec 03 12:57:21 crc kubenswrapper[4849]: E1203 12:57:21.870755 4849 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="103d9e4a-9b77-44f1-8e2d-202475857239" containerName="extract-content" Dec 03 12:57:21 crc kubenswrapper[4849]: I1203 12:57:21.870760 4849 state_mem.go:107] "Deleted CPUSet assignment" podUID="103d9e4a-9b77-44f1-8e2d-202475857239" containerName="extract-content" Dec 03 12:57:21 crc kubenswrapper[4849]: I1203 12:57:21.870895 4849 memory_manager.go:354] "RemoveStaleState removing state" podUID="103d9e4a-9b77-44f1-8e2d-202475857239" containerName="registry-server" Dec 03 12:57:21 crc kubenswrapper[4849]: I1203 12:57:21.871788 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:21 crc kubenswrapper[4849]: I1203 12:57:21.880469 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c9cpw"] Dec 03 12:57:21 crc kubenswrapper[4849]: I1203 12:57:21.968908 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1af1e93c-98bb-477d-a105-f0cf36168093-utilities\") pod \"community-operators-c9cpw\" (UID: \"1af1e93c-98bb-477d-a105-f0cf36168093\") " pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:21 crc kubenswrapper[4849]: I1203 12:57:21.969091 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7jv4\" (UniqueName: \"kubernetes.io/projected/1af1e93c-98bb-477d-a105-f0cf36168093-kube-api-access-l7jv4\") pod \"community-operators-c9cpw\" (UID: \"1af1e93c-98bb-477d-a105-f0cf36168093\") " pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:21 crc kubenswrapper[4849]: I1203 12:57:21.969133 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1af1e93c-98bb-477d-a105-f0cf36168093-catalog-content\") pod \"community-operators-c9cpw\" (UID: \"1af1e93c-98bb-477d-a105-f0cf36168093\") " pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.070591 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1af1e93c-98bb-477d-a105-f0cf36168093-utilities\") pod \"community-operators-c9cpw\" (UID: \"1af1e93c-98bb-477d-a105-f0cf36168093\") " pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.070722 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7jv4\" (UniqueName: \"kubernetes.io/projected/1af1e93c-98bb-477d-a105-f0cf36168093-kube-api-access-l7jv4\") pod \"community-operators-c9cpw\" (UID: \"1af1e93c-98bb-477d-a105-f0cf36168093\") " pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.070749 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1af1e93c-98bb-477d-a105-f0cf36168093-catalog-content\") pod \"community-operators-c9cpw\" (UID: \"1af1e93c-98bb-477d-a105-f0cf36168093\") " pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.070899 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vspwm"] Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.071228 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1af1e93c-98bb-477d-a105-f0cf36168093-catalog-content\") pod \"community-operators-c9cpw\" (UID: \"1af1e93c-98bb-477d-a105-f0cf36168093\") " pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.071440 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1af1e93c-98bb-477d-a105-f0cf36168093-utilities\") pod \"community-operators-c9cpw\" (UID: 
\"1af1e93c-98bb-477d-a105-f0cf36168093\") " pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.072224 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.080577 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vspwm"] Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.104549 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7jv4\" (UniqueName: \"kubernetes.io/projected/1af1e93c-98bb-477d-a105-f0cf36168093-kube-api-access-l7jv4\") pod \"community-operators-c9cpw\" (UID: \"1af1e93c-98bb-477d-a105-f0cf36168093\") " pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.172584 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-catalog-content\") pod \"certified-operators-vspwm\" (UID: \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\") " pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.172632 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-utilities\") pod \"certified-operators-vspwm\" (UID: \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\") " pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.172816 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92r44\" (UniqueName: \"kubernetes.io/projected/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-kube-api-access-92r44\") pod \"certified-operators-vspwm\" (UID: \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\") " pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.193892 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.275387 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-catalog-content\") pod \"certified-operators-vspwm\" (UID: \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\") " pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.275431 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-utilities\") pod \"certified-operators-vspwm\" (UID: \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\") " pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.275504 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92r44\" (UniqueName: \"kubernetes.io/projected/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-kube-api-access-92r44\") pod \"certified-operators-vspwm\" (UID: \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\") " pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.276206 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-catalog-content\") pod \"certified-operators-vspwm\" (UID: \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\") " pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.276259 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-utilities\") pod \"certified-operators-vspwm\" (UID: \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\") " pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.298630 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92r44\" (UniqueName: \"kubernetes.io/projected/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-kube-api-access-92r44\") pod \"certified-operators-vspwm\" (UID: \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\") " pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.384939 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.390217 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c9cpw"] Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.768119 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vspwm"] Dec 03 12:57:22 crc kubenswrapper[4849]: W1203 12:57:22.775872 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4df6597_7654_4dcf_9fd4_cdad1f74fac4.slice/crio-ebd6cf2b8c4b7542b0e99e69a9ca129a452815b443444861f2fc11dfb3b93fd7 WatchSource:0}: Error finding container ebd6cf2b8c4b7542b0e99e69a9ca129a452815b443444861f2fc11dfb3b93fd7: Status 404 returned error can't find the container with id ebd6cf2b8c4b7542b0e99e69a9ca129a452815b443444861f2fc11dfb3b93fd7 Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.891624 4849 generic.go:334] "Generic (PLEG): container finished" podID="1af1e93c-98bb-477d-a105-f0cf36168093" containerID="4c64fdd031d0f42dcd1db857e6f0771f11d0ffafcf75cbf6d9a5c8c35e8c0b55" exitCode=0 Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.891685 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c9cpw" event={"ID":"1af1e93c-98bb-477d-a105-f0cf36168093","Type":"ContainerDied","Data":"4c64fdd031d0f42dcd1db857e6f0771f11d0ffafcf75cbf6d9a5c8c35e8c0b55"} Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.891708 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c9cpw" event={"ID":"1af1e93c-98bb-477d-a105-f0cf36168093","Type":"ContainerStarted","Data":"ff3930860ba3200a601241e3b1d26b360bd3cf55a2228ba4d435b3fad5b89924"} Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.895159 4849 generic.go:334] "Generic (PLEG): container finished" podID="a4df6597-7654-4dcf-9fd4-cdad1f74fac4" containerID="25c9c877cfbb11d8f83e0d34cd499b30b7313c8c91422791889909cf63f32a91" exitCode=0 Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.895191 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vspwm" event={"ID":"a4df6597-7654-4dcf-9fd4-cdad1f74fac4","Type":"ContainerDied","Data":"25c9c877cfbb11d8f83e0d34cd499b30b7313c8c91422791889909cf63f32a91"} Dec 03 12:57:22 crc kubenswrapper[4849]: I1203 12:57:22.895216 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vspwm" event={"ID":"a4df6597-7654-4dcf-9fd4-cdad1f74fac4","Type":"ContainerStarted","Data":"ebd6cf2b8c4b7542b0e99e69a9ca129a452815b443444861f2fc11dfb3b93fd7"} Dec 03 12:57:23 crc kubenswrapper[4849]: I1203 12:57:23.904153 4849 generic.go:334] "Generic (PLEG): container finished" podID="1af1e93c-98bb-477d-a105-f0cf36168093" containerID="35f027c48fc76f3f5f9097fe482673fd718651a962adb942974c3c54c148c3d1" exitCode=0 Dec 03 12:57:23 crc kubenswrapper[4849]: I1203 12:57:23.904199 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c9cpw" event={"ID":"1af1e93c-98bb-477d-a105-f0cf36168093","Type":"ContainerDied","Data":"35f027c48fc76f3f5f9097fe482673fd718651a962adb942974c3c54c148c3d1"} Dec 03 12:57:23 crc kubenswrapper[4849]: I1203 12:57:23.907738 4849 generic.go:334] "Generic (PLEG): container finished" podID="a4df6597-7654-4dcf-9fd4-cdad1f74fac4" 
containerID="d4ab8ef30d768acfe74b36cdedf013f655f65d8126f5f716cba92473d92abfd0" exitCode=0 Dec 03 12:57:23 crc kubenswrapper[4849]: I1203 12:57:23.907767 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vspwm" event={"ID":"a4df6597-7654-4dcf-9fd4-cdad1f74fac4","Type":"ContainerDied","Data":"d4ab8ef30d768acfe74b36cdedf013f655f65d8126f5f716cba92473d92abfd0"} Dec 03 12:57:24 crc kubenswrapper[4849]: I1203 12:57:24.915305 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c9cpw" event={"ID":"1af1e93c-98bb-477d-a105-f0cf36168093","Type":"ContainerStarted","Data":"46978664c025d5cce2de81eca566557d439e62e372f5e272a82d93a75653bd28"} Dec 03 12:57:24 crc kubenswrapper[4849]: I1203 12:57:24.917343 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vspwm" event={"ID":"a4df6597-7654-4dcf-9fd4-cdad1f74fac4","Type":"ContainerStarted","Data":"6d576355131d138c761af853a145840b272cc260baed4300d4fad189e2e2f65c"} Dec 03 12:57:24 crc kubenswrapper[4849]: I1203 12:57:24.928872 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c9cpw" podStartSLOduration=2.295757624 podStartE2EDuration="3.928858667s" podCreationTimestamp="2025-12-03 12:57:21 +0000 UTC" firstStartedPulling="2025-12-03 12:57:22.893722605 +0000 UTC m=+2189.355570388" lastFinishedPulling="2025-12-03 12:57:24.526823648 +0000 UTC m=+2190.988671431" observedRunningTime="2025-12-03 12:57:24.928093378 +0000 UTC m=+2191.389941160" watchObservedRunningTime="2025-12-03 12:57:24.928858667 +0000 UTC m=+2191.390706450" Dec 03 12:57:24 crc kubenswrapper[4849]: I1203 12:57:24.942525 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vspwm" podStartSLOduration=1.397155322 podStartE2EDuration="2.942512448s" podCreationTimestamp="2025-12-03 12:57:22 +0000 UTC" firstStartedPulling="2025-12-03 12:57:22.89633692 +0000 UTC m=+2189.358184703" lastFinishedPulling="2025-12-03 12:57:24.441694045 +0000 UTC m=+2190.903541829" observedRunningTime="2025-12-03 12:57:24.939476569 +0000 UTC m=+2191.401324362" watchObservedRunningTime="2025-12-03 12:57:24.942512448 +0000 UTC m=+2191.404360230" Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.067346 4849 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-622qw"] Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.068635 4849 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.074989 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-622qw"] Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.114806 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-utilities\") pod \"redhat-marketplace-622qw\" (UID: \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\") " pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.114838 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-catalog-content\") pod \"redhat-marketplace-622qw\" (UID: \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\") " pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.114857 4849 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ds6pb\" (UniqueName: \"kubernetes.io/projected/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-kube-api-access-ds6pb\") pod \"redhat-marketplace-622qw\" (UID: \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\") " pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.216049 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-utilities\") pod \"redhat-marketplace-622qw\" (UID: \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\") " pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.216089 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-catalog-content\") pod \"redhat-marketplace-622qw\" (UID: \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\") " pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.216104 4849 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ds6pb\" (UniqueName: \"kubernetes.io/projected/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-kube-api-access-ds6pb\") pod \"redhat-marketplace-622qw\" (UID: \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\") " pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.216497 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-catalog-content\") pod \"redhat-marketplace-622qw\" (UID: \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\") " pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.216500 4849 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-utilities\") pod \"redhat-marketplace-622qw\" (UID: \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\") " pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.232957 4849 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ds6pb\" (UniqueName: \"kubernetes.io/projected/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-kube-api-access-ds6pb\") pod \"redhat-marketplace-622qw\" (UID: \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\") " pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.380637 4849 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.749460 4849 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-622qw"] Dec 03 12:57:25 crc kubenswrapper[4849]: W1203 12:57:25.751732 4849 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfcc8d36c_d769_4dc3_a7c9_3453fd118d8e.slice/crio-5b979fb00079ad2a1241439f008b460272428050cd5310606f8ed1bcda4444ba WatchSource:0}: Error finding container 5b979fb00079ad2a1241439f008b460272428050cd5310606f8ed1bcda4444ba: Status 404 returned error can't find the container with id 5b979fb00079ad2a1241439f008b460272428050cd5310606f8ed1bcda4444ba Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.924482 4849 generic.go:334] "Generic (PLEG): container finished" podID="fcc8d36c-d769-4dc3-a7c9-3453fd118d8e" containerID="410b45417aea3b13c90e57e3e9331438f3914e05ba59b8f33755b33ad0f1a4e4" exitCode=0 Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.924519 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-622qw" event={"ID":"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e","Type":"ContainerDied","Data":"410b45417aea3b13c90e57e3e9331438f3914e05ba59b8f33755b33ad0f1a4e4"} Dec 03 12:57:25 crc kubenswrapper[4849]: I1203 12:57:25.925260 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-622qw" event={"ID":"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e","Type":"ContainerStarted","Data":"5b979fb00079ad2a1241439f008b460272428050cd5310606f8ed1bcda4444ba"} Dec 03 12:57:26 crc kubenswrapper[4849]: I1203 12:57:26.932436 4849 generic.go:334] "Generic (PLEG): container finished" podID="fcc8d36c-d769-4dc3-a7c9-3453fd118d8e" containerID="d3e3f1b38289a15c3cd27762806b6a470f97e3a89781dc0e9a150ef1f4789327" exitCode=0 Dec 03 12:57:26 crc kubenswrapper[4849]: I1203 12:57:26.932489 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-622qw" event={"ID":"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e","Type":"ContainerDied","Data":"d3e3f1b38289a15c3cd27762806b6a470f97e3a89781dc0e9a150ef1f4789327"} Dec 03 12:57:27 crc kubenswrapper[4849]: I1203 12:57:27.939887 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-622qw" event={"ID":"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e","Type":"ContainerStarted","Data":"ed6f8afa7e7087716c22c143822fcdd078b8c361a4007f8413eebcd4e758ad12"} Dec 03 12:57:27 crc kubenswrapper[4849]: I1203 12:57:27.956307 4849 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-622qw" podStartSLOduration=1.41598426 podStartE2EDuration="2.956294967s" podCreationTimestamp="2025-12-03 12:57:25 +0000 UTC" firstStartedPulling="2025-12-03 12:57:25.925669476 +0000 UTC m=+2192.387517260" lastFinishedPulling="2025-12-03 12:57:27.465980184 +0000 UTC m=+2193.927827967" observedRunningTime="2025-12-03 12:57:27.952114117 +0000 UTC m=+2194.413961900" 
watchObservedRunningTime="2025-12-03 12:57:27.956294967 +0000 UTC m=+2194.418142750" Dec 03 12:57:28 crc kubenswrapper[4849]: I1203 12:57:28.856269 4849 scope.go:117] "RemoveContainer" containerID="13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" Dec 03 12:57:28 crc kubenswrapper[4849]: E1203 12:57:28.856587 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:57:32 crc kubenswrapper[4849]: I1203 12:57:32.194894 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:32 crc kubenswrapper[4849]: I1203 12:57:32.195244 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:32 crc kubenswrapper[4849]: I1203 12:57:32.224731 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:32 crc kubenswrapper[4849]: I1203 12:57:32.386491 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:32 crc kubenswrapper[4849]: I1203 12:57:32.386536 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:32 crc kubenswrapper[4849]: I1203 12:57:32.414345 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:32 crc kubenswrapper[4849]: I1203 12:57:32.990807 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:32 crc kubenswrapper[4849]: I1203 12:57:32.991200 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:33 crc kubenswrapper[4849]: I1203 12:57:33.846436 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vspwm"] Dec 03 12:57:33 crc kubenswrapper[4849]: E1203 12:57:33.860421 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:57:34 crc kubenswrapper[4849]: I1203 12:57:34.973787 4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vspwm" podUID="a4df6597-7654-4dcf-9fd4-cdad1f74fac4" containerName="registry-server" containerID="cri-o://6d576355131d138c761af853a145840b272cc260baed4300d4fad189e2e2f65c" gracePeriod=2 Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.249493 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c9cpw"] Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.249842 4849 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-marketplace/community-operators-c9cpw" podUID="1af1e93c-98bb-477d-a105-f0cf36168093" containerName="registry-server" containerID="cri-o://46978664c025d5cce2de81eca566557d439e62e372f5e272a82d93a75653bd28" gracePeriod=2 Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.362910 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.381081 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.381118 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.413757 4849 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.446965 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-utilities\") pod \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\" (UID: \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\") " Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.447083 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-catalog-content\") pod \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\" (UID: \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\") " Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.447136 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92r44\" (UniqueName: \"kubernetes.io/projected/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-kube-api-access-92r44\") pod \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\" (UID: \"a4df6597-7654-4dcf-9fd4-cdad1f74fac4\") " Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.448463 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-utilities" (OuterVolumeSpecName: "utilities") pod "a4df6597-7654-4dcf-9fd4-cdad1f74fac4" (UID: "a4df6597-7654-4dcf-9fd4-cdad1f74fac4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.452067 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-kube-api-access-92r44" (OuterVolumeSpecName: "kube-api-access-92r44") pod "a4df6597-7654-4dcf-9fd4-cdad1f74fac4" (UID: "a4df6597-7654-4dcf-9fd4-cdad1f74fac4"). InnerVolumeSpecName "kube-api-access-92r44". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.483353 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4df6597-7654-4dcf-9fd4-cdad1f74fac4" (UID: "a4df6597-7654-4dcf-9fd4-cdad1f74fac4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.549206 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.549339 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.549350 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92r44\" (UniqueName: \"kubernetes.io/projected/a4df6597-7654-4dcf-9fd4-cdad1f74fac4-kube-api-access-92r44\") on node \"crc\" DevicePath \"\"" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.551084 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.650758 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7jv4\" (UniqueName: \"kubernetes.io/projected/1af1e93c-98bb-477d-a105-f0cf36168093-kube-api-access-l7jv4\") pod \"1af1e93c-98bb-477d-a105-f0cf36168093\" (UID: \"1af1e93c-98bb-477d-a105-f0cf36168093\") " Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.650895 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1af1e93c-98bb-477d-a105-f0cf36168093-utilities\") pod \"1af1e93c-98bb-477d-a105-f0cf36168093\" (UID: \"1af1e93c-98bb-477d-a105-f0cf36168093\") " Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.650963 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1af1e93c-98bb-477d-a105-f0cf36168093-catalog-content\") pod \"1af1e93c-98bb-477d-a105-f0cf36168093\" (UID: \"1af1e93c-98bb-477d-a105-f0cf36168093\") " Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.651593 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1af1e93c-98bb-477d-a105-f0cf36168093-utilities" (OuterVolumeSpecName: "utilities") pod "1af1e93c-98bb-477d-a105-f0cf36168093" (UID: "1af1e93c-98bb-477d-a105-f0cf36168093"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.653523 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1af1e93c-98bb-477d-a105-f0cf36168093-kube-api-access-l7jv4" (OuterVolumeSpecName: "kube-api-access-l7jv4") pod "1af1e93c-98bb-477d-a105-f0cf36168093" (UID: "1af1e93c-98bb-477d-a105-f0cf36168093"). InnerVolumeSpecName "kube-api-access-l7jv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.688028 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1af1e93c-98bb-477d-a105-f0cf36168093-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1af1e93c-98bb-477d-a105-f0cf36168093" (UID: "1af1e93c-98bb-477d-a105-f0cf36168093"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.753247 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7jv4\" (UniqueName: \"kubernetes.io/projected/1af1e93c-98bb-477d-a105-f0cf36168093-kube-api-access-l7jv4\") on node \"crc\" DevicePath \"\"" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.753501 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1af1e93c-98bb-477d-a105-f0cf36168093-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.753574 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1af1e93c-98bb-477d-a105-f0cf36168093-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.980725 4849 generic.go:334] "Generic (PLEG): container finished" podID="1af1e93c-98bb-477d-a105-f0cf36168093" containerID="46978664c025d5cce2de81eca566557d439e62e372f5e272a82d93a75653bd28" exitCode=0 Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.980783 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c9cpw" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.980796 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c9cpw" event={"ID":"1af1e93c-98bb-477d-a105-f0cf36168093","Type":"ContainerDied","Data":"46978664c025d5cce2de81eca566557d439e62e372f5e272a82d93a75653bd28"} Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.980821 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c9cpw" event={"ID":"1af1e93c-98bb-477d-a105-f0cf36168093","Type":"ContainerDied","Data":"ff3930860ba3200a601241e3b1d26b360bd3cf55a2228ba4d435b3fad5b89924"} Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.980836 4849 scope.go:117] "RemoveContainer" containerID="46978664c025d5cce2de81eca566557d439e62e372f5e272a82d93a75653bd28" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.983920 4849 generic.go:334] "Generic (PLEG): container finished" podID="a4df6597-7654-4dcf-9fd4-cdad1f74fac4" containerID="6d576355131d138c761af853a145840b272cc260baed4300d4fad189e2e2f65c" exitCode=0 Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.984022 4849 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vspwm" Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.984049 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vspwm" event={"ID":"a4df6597-7654-4dcf-9fd4-cdad1f74fac4","Type":"ContainerDied","Data":"6d576355131d138c761af853a145840b272cc260baed4300d4fad189e2e2f65c"} Dec 03 12:57:35 crc kubenswrapper[4849]: I1203 12:57:35.984100 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vspwm" event={"ID":"a4df6597-7654-4dcf-9fd4-cdad1f74fac4","Type":"ContainerDied","Data":"ebd6cf2b8c4b7542b0e99e69a9ca129a452815b443444861f2fc11dfb3b93fd7"} Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.002836 4849 scope.go:117] "RemoveContainer" containerID="35f027c48fc76f3f5f9097fe482673fd718651a962adb942974c3c54c148c3d1" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.009244 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c9cpw"] Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.014298 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c9cpw"] Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.017174 4849 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.019473 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vspwm"] Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.023556 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vspwm"] Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.025992 4849 scope.go:117] "RemoveContainer" containerID="4c64fdd031d0f42dcd1db857e6f0771f11d0ffafcf75cbf6d9a5c8c35e8c0b55" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.041833 4849 scope.go:117] "RemoveContainer" containerID="46978664c025d5cce2de81eca566557d439e62e372f5e272a82d93a75653bd28" Dec 03 12:57:36 crc kubenswrapper[4849]: E1203 12:57:36.042121 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46978664c025d5cce2de81eca566557d439e62e372f5e272a82d93a75653bd28\": container with ID starting with 46978664c025d5cce2de81eca566557d439e62e372f5e272a82d93a75653bd28 not found: ID does not exist" containerID="46978664c025d5cce2de81eca566557d439e62e372f5e272a82d93a75653bd28" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.042157 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46978664c025d5cce2de81eca566557d439e62e372f5e272a82d93a75653bd28"} err="failed to get container status \"46978664c025d5cce2de81eca566557d439e62e372f5e272a82d93a75653bd28\": rpc error: code = NotFound desc = could not find container \"46978664c025d5cce2de81eca566557d439e62e372f5e272a82d93a75653bd28\": container with ID starting with 46978664c025d5cce2de81eca566557d439e62e372f5e272a82d93a75653bd28 not found: ID does not exist" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.042180 4849 scope.go:117] "RemoveContainer" containerID="35f027c48fc76f3f5f9097fe482673fd718651a962adb942974c3c54c148c3d1" Dec 03 12:57:36 crc kubenswrapper[4849]: E1203 12:57:36.042413 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not 
find container \"35f027c48fc76f3f5f9097fe482673fd718651a962adb942974c3c54c148c3d1\": container with ID starting with 35f027c48fc76f3f5f9097fe482673fd718651a962adb942974c3c54c148c3d1 not found: ID does not exist" containerID="35f027c48fc76f3f5f9097fe482673fd718651a962adb942974c3c54c148c3d1" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.042437 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35f027c48fc76f3f5f9097fe482673fd718651a962adb942974c3c54c148c3d1"} err="failed to get container status \"35f027c48fc76f3f5f9097fe482673fd718651a962adb942974c3c54c148c3d1\": rpc error: code = NotFound desc = could not find container \"35f027c48fc76f3f5f9097fe482673fd718651a962adb942974c3c54c148c3d1\": container with ID starting with 35f027c48fc76f3f5f9097fe482673fd718651a962adb942974c3c54c148c3d1 not found: ID does not exist" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.042451 4849 scope.go:117] "RemoveContainer" containerID="4c64fdd031d0f42dcd1db857e6f0771f11d0ffafcf75cbf6d9a5c8c35e8c0b55" Dec 03 12:57:36 crc kubenswrapper[4849]: E1203 12:57:36.042799 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c64fdd031d0f42dcd1db857e6f0771f11d0ffafcf75cbf6d9a5c8c35e8c0b55\": container with ID starting with 4c64fdd031d0f42dcd1db857e6f0771f11d0ffafcf75cbf6d9a5c8c35e8c0b55 not found: ID does not exist" containerID="4c64fdd031d0f42dcd1db857e6f0771f11d0ffafcf75cbf6d9a5c8c35e8c0b55" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.042831 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c64fdd031d0f42dcd1db857e6f0771f11d0ffafcf75cbf6d9a5c8c35e8c0b55"} err="failed to get container status \"4c64fdd031d0f42dcd1db857e6f0771f11d0ffafcf75cbf6d9a5c8c35e8c0b55\": rpc error: code = NotFound desc = could not find container \"4c64fdd031d0f42dcd1db857e6f0771f11d0ffafcf75cbf6d9a5c8c35e8c0b55\": container with ID starting with 4c64fdd031d0f42dcd1db857e6f0771f11d0ffafcf75cbf6d9a5c8c35e8c0b55 not found: ID does not exist" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.042852 4849 scope.go:117] "RemoveContainer" containerID="6d576355131d138c761af853a145840b272cc260baed4300d4fad189e2e2f65c" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.054601 4849 scope.go:117] "RemoveContainer" containerID="d4ab8ef30d768acfe74b36cdedf013f655f65d8126f5f716cba92473d92abfd0" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.066608 4849 scope.go:117] "RemoveContainer" containerID="25c9c877cfbb11d8f83e0d34cd499b30b7313c8c91422791889909cf63f32a91" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.078180 4849 scope.go:117] "RemoveContainer" containerID="6d576355131d138c761af853a145840b272cc260baed4300d4fad189e2e2f65c" Dec 03 12:57:36 crc kubenswrapper[4849]: E1203 12:57:36.078462 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d576355131d138c761af853a145840b272cc260baed4300d4fad189e2e2f65c\": container with ID starting with 6d576355131d138c761af853a145840b272cc260baed4300d4fad189e2e2f65c not found: ID does not exist" containerID="6d576355131d138c761af853a145840b272cc260baed4300d4fad189e2e2f65c" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.078490 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d576355131d138c761af853a145840b272cc260baed4300d4fad189e2e2f65c"} err="failed to get 
container status \"6d576355131d138c761af853a145840b272cc260baed4300d4fad189e2e2f65c\": rpc error: code = NotFound desc = could not find container \"6d576355131d138c761af853a145840b272cc260baed4300d4fad189e2e2f65c\": container with ID starting with 6d576355131d138c761af853a145840b272cc260baed4300d4fad189e2e2f65c not found: ID does not exist" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.078507 4849 scope.go:117] "RemoveContainer" containerID="d4ab8ef30d768acfe74b36cdedf013f655f65d8126f5f716cba92473d92abfd0" Dec 03 12:57:36 crc kubenswrapper[4849]: E1203 12:57:36.079350 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4ab8ef30d768acfe74b36cdedf013f655f65d8126f5f716cba92473d92abfd0\": container with ID starting with d4ab8ef30d768acfe74b36cdedf013f655f65d8126f5f716cba92473d92abfd0 not found: ID does not exist" containerID="d4ab8ef30d768acfe74b36cdedf013f655f65d8126f5f716cba92473d92abfd0" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.079381 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4ab8ef30d768acfe74b36cdedf013f655f65d8126f5f716cba92473d92abfd0"} err="failed to get container status \"d4ab8ef30d768acfe74b36cdedf013f655f65d8126f5f716cba92473d92abfd0\": rpc error: code = NotFound desc = could not find container \"d4ab8ef30d768acfe74b36cdedf013f655f65d8126f5f716cba92473d92abfd0\": container with ID starting with d4ab8ef30d768acfe74b36cdedf013f655f65d8126f5f716cba92473d92abfd0 not found: ID does not exist" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.079404 4849 scope.go:117] "RemoveContainer" containerID="25c9c877cfbb11d8f83e0d34cd499b30b7313c8c91422791889909cf63f32a91" Dec 03 12:57:36 crc kubenswrapper[4849]: E1203 12:57:36.079627 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25c9c877cfbb11d8f83e0d34cd499b30b7313c8c91422791889909cf63f32a91\": container with ID starting with 25c9c877cfbb11d8f83e0d34cd499b30b7313c8c91422791889909cf63f32a91 not found: ID does not exist" containerID="25c9c877cfbb11d8f83e0d34cd499b30b7313c8c91422791889909cf63f32a91" Dec 03 12:57:36 crc kubenswrapper[4849]: I1203 12:57:36.079664 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25c9c877cfbb11d8f83e0d34cd499b30b7313c8c91422791889909cf63f32a91"} err="failed to get container status \"25c9c877cfbb11d8f83e0d34cd499b30b7313c8c91422791889909cf63f32a91\": rpc error: code = NotFound desc = could not find container \"25c9c877cfbb11d8f83e0d34cd499b30b7313c8c91422791889909cf63f32a91\": container with ID starting with 25c9c877cfbb11d8f83e0d34cd499b30b7313c8c91422791889909cf63f32a91 not found: ID does not exist" Dec 03 12:57:37 crc kubenswrapper[4849]: I1203 12:57:37.862922 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1af1e93c-98bb-477d-a105-f0cf36168093" path="/var/lib/kubelet/pods/1af1e93c-98bb-477d-a105-f0cf36168093/volumes" Dec 03 12:57:37 crc kubenswrapper[4849]: I1203 12:57:37.863834 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4df6597-7654-4dcf-9fd4-cdad1f74fac4" path="/var/lib/kubelet/pods/a4df6597-7654-4dcf-9fd4-cdad1f74fac4/volumes" Dec 03 12:57:38 crc kubenswrapper[4849]: I1203 12:57:38.247094 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-622qw"] Dec 03 12:57:38 crc kubenswrapper[4849]: I1203 12:57:38.247246 
4849 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-622qw" podUID="fcc8d36c-d769-4dc3-a7c9-3453fd118d8e" containerName="registry-server" containerID="cri-o://ed6f8afa7e7087716c22c143822fcdd078b8c361a4007f8413eebcd4e758ad12" gracePeriod=2 Dec 03 12:57:38 crc kubenswrapper[4849]: I1203 12:57:38.572259 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:38 crc kubenswrapper[4849]: I1203 12:57:38.621915 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-catalog-content\") pod \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\" (UID: \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\") " Dec 03 12:57:38 crc kubenswrapper[4849]: I1203 12:57:38.622080 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ds6pb\" (UniqueName: \"kubernetes.io/projected/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-kube-api-access-ds6pb\") pod \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\" (UID: \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\") " Dec 03 12:57:38 crc kubenswrapper[4849]: I1203 12:57:38.622217 4849 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-utilities\") pod \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\" (UID: \"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e\") " Dec 03 12:57:38 crc kubenswrapper[4849]: I1203 12:57:38.622835 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-utilities" (OuterVolumeSpecName: "utilities") pod "fcc8d36c-d769-4dc3-a7c9-3453fd118d8e" (UID: "fcc8d36c-d769-4dc3-a7c9-3453fd118d8e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:57:38 crc kubenswrapper[4849]: I1203 12:57:38.626660 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-kube-api-access-ds6pb" (OuterVolumeSpecName: "kube-api-access-ds6pb") pod "fcc8d36c-d769-4dc3-a7c9-3453fd118d8e" (UID: "fcc8d36c-d769-4dc3-a7c9-3453fd118d8e"). InnerVolumeSpecName "kube-api-access-ds6pb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 12:57:38 crc kubenswrapper[4849]: I1203 12:57:38.635391 4849 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fcc8d36c-d769-4dc3-a7c9-3453fd118d8e" (UID: "fcc8d36c-d769-4dc3-a7c9-3453fd118d8e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 12:57:38 crc kubenswrapper[4849]: I1203 12:57:38.724880 4849 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 12:57:38 crc kubenswrapper[4849]: I1203 12:57:38.724912 4849 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ds6pb\" (UniqueName: \"kubernetes.io/projected/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-kube-api-access-ds6pb\") on node \"crc\" DevicePath \"\"" Dec 03 12:57:38 crc kubenswrapper[4849]: I1203 12:57:38.724924 4849 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.002508 4849 generic.go:334] "Generic (PLEG): container finished" podID="fcc8d36c-d769-4dc3-a7c9-3453fd118d8e" containerID="ed6f8afa7e7087716c22c143822fcdd078b8c361a4007f8413eebcd4e758ad12" exitCode=0 Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.002525 4849 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-622qw" Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.002541 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-622qw" event={"ID":"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e","Type":"ContainerDied","Data":"ed6f8afa7e7087716c22c143822fcdd078b8c361a4007f8413eebcd4e758ad12"} Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.002560 4849 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-622qw" event={"ID":"fcc8d36c-d769-4dc3-a7c9-3453fd118d8e","Type":"ContainerDied","Data":"5b979fb00079ad2a1241439f008b460272428050cd5310606f8ed1bcda4444ba"} Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.002576 4849 scope.go:117] "RemoveContainer" containerID="ed6f8afa7e7087716c22c143822fcdd078b8c361a4007f8413eebcd4e758ad12" Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.015550 4849 scope.go:117] "RemoveContainer" containerID="d3e3f1b38289a15c3cd27762806b6a470f97e3a89781dc0e9a150ef1f4789327" Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.026335 4849 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-622qw"] Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.027497 4849 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-622qw"] Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.029684 4849 scope.go:117] "RemoveContainer" containerID="410b45417aea3b13c90e57e3e9331438f3914e05ba59b8f33755b33ad0f1a4e4" Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.047949 4849 scope.go:117] "RemoveContainer" containerID="ed6f8afa7e7087716c22c143822fcdd078b8c361a4007f8413eebcd4e758ad12" Dec 03 12:57:39 crc kubenswrapper[4849]: E1203 12:57:39.048203 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed6f8afa7e7087716c22c143822fcdd078b8c361a4007f8413eebcd4e758ad12\": container with ID starting with ed6f8afa7e7087716c22c143822fcdd078b8c361a4007f8413eebcd4e758ad12 not found: ID does not exist" containerID="ed6f8afa7e7087716c22c143822fcdd078b8c361a4007f8413eebcd4e758ad12" Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.048233 4849 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed6f8afa7e7087716c22c143822fcdd078b8c361a4007f8413eebcd4e758ad12"} err="failed to get container status \"ed6f8afa7e7087716c22c143822fcdd078b8c361a4007f8413eebcd4e758ad12\": rpc error: code = NotFound desc = could not find container \"ed6f8afa7e7087716c22c143822fcdd078b8c361a4007f8413eebcd4e758ad12\": container with ID starting with ed6f8afa7e7087716c22c143822fcdd078b8c361a4007f8413eebcd4e758ad12 not found: ID does not exist" Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.048260 4849 scope.go:117] "RemoveContainer" containerID="d3e3f1b38289a15c3cd27762806b6a470f97e3a89781dc0e9a150ef1f4789327" Dec 03 12:57:39 crc kubenswrapper[4849]: E1203 12:57:39.048479 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3e3f1b38289a15c3cd27762806b6a470f97e3a89781dc0e9a150ef1f4789327\": container with ID starting with d3e3f1b38289a15c3cd27762806b6a470f97e3a89781dc0e9a150ef1f4789327 not found: ID does not exist" containerID="d3e3f1b38289a15c3cd27762806b6a470f97e3a89781dc0e9a150ef1f4789327" Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.048499 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3e3f1b38289a15c3cd27762806b6a470f97e3a89781dc0e9a150ef1f4789327"} err="failed to get container status \"d3e3f1b38289a15c3cd27762806b6a470f97e3a89781dc0e9a150ef1f4789327\": rpc error: code = NotFound desc = could not find container \"d3e3f1b38289a15c3cd27762806b6a470f97e3a89781dc0e9a150ef1f4789327\": container with ID starting with d3e3f1b38289a15c3cd27762806b6a470f97e3a89781dc0e9a150ef1f4789327 not found: ID does not exist" Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.048565 4849 scope.go:117] "RemoveContainer" containerID="410b45417aea3b13c90e57e3e9331438f3914e05ba59b8f33755b33ad0f1a4e4" Dec 03 12:57:39 crc kubenswrapper[4849]: E1203 12:57:39.048780 4849 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"410b45417aea3b13c90e57e3e9331438f3914e05ba59b8f33755b33ad0f1a4e4\": container with ID starting with 410b45417aea3b13c90e57e3e9331438f3914e05ba59b8f33755b33ad0f1a4e4 not found: ID does not exist" containerID="410b45417aea3b13c90e57e3e9331438f3914e05ba59b8f33755b33ad0f1a4e4" Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.048800 4849 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"410b45417aea3b13c90e57e3e9331438f3914e05ba59b8f33755b33ad0f1a4e4"} err="failed to get container status \"410b45417aea3b13c90e57e3e9331438f3914e05ba59b8f33755b33ad0f1a4e4\": rpc error: code = NotFound desc = could not find container \"410b45417aea3b13c90e57e3e9331438f3914e05ba59b8f33755b33ad0f1a4e4\": container with ID starting with 410b45417aea3b13c90e57e3e9331438f3914e05ba59b8f33755b33ad0f1a4e4 not found: ID does not exist" Dec 03 12:57:39 crc kubenswrapper[4849]: I1203 12:57:39.863628 4849 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcc8d36c-d769-4dc3-a7c9-3453fd118d8e" path="/var/lib/kubelet/pods/fcc8d36c-d769-4dc3-a7c9-3453fd118d8e/volumes" Dec 03 12:57:43 crc kubenswrapper[4849]: I1203 12:57:43.860196 4849 scope.go:117] "RemoveContainer" containerID="13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" Dec 03 12:57:43 crc kubenswrapper[4849]: E1203 12:57:43.860592 4849 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:57:45 crc kubenswrapper[4849]: E1203 12:57:45.861767 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:57:55 crc kubenswrapper[4849]: I1203 12:57:55.857182 4849 scope.go:117] "RemoveContainer" containerID="13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" Dec 03 12:57:55 crc kubenswrapper[4849]: E1203 12:57:55.857740 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:57:58 crc kubenswrapper[4849]: E1203 12:57:58.858087 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:58:09 crc kubenswrapper[4849]: I1203 12:58:09.856820 4849 scope.go:117] "RemoveContainer" containerID="13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" Dec 03 12:58:09 crc kubenswrapper[4849]: E1203 12:58:09.857299 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:58:13 crc kubenswrapper[4849]: E1203 12:58:13.861227 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:58:23 crc kubenswrapper[4849]: I1203 12:58:23.860145 4849 scope.go:117] "RemoveContainer" containerID="13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" Dec 03 12:58:23 crc kubenswrapper[4849]: E1203 12:58:23.860764 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:58:24 crc kubenswrapper[4849]: E1203 12:58:24.858586 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:58:35 crc kubenswrapper[4849]: E1203 12:58:35.858017 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:58:38 crc kubenswrapper[4849]: I1203 12:58:38.856861 4849 scope.go:117] "RemoveContainer" containerID="13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" Dec 03 12:58:38 crc kubenswrapper[4849]: E1203 12:58:38.857185 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:58:50 crc kubenswrapper[4849]: E1203 12:58:50.858159 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205" Dec 03 12:58:51 crc kubenswrapper[4849]: I1203 12:58:51.856095 4849 scope.go:117] "RemoveContainer" containerID="13647c985297e4b9bb8f870a49341471ac5d720967691b185604c8cefa84ffd0" Dec 03 12:58:51 crc kubenswrapper[4849]: E1203 12:58:51.856450 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hszbg_openshift-machine-config-operator(d80ee321-2880-456a-9f19-c46cb0ab8128)\"" pod="openshift-machine-config-operator/machine-config-daemon-hszbg" podUID="d80ee321-2880-456a-9f19-c46cb0ab8128" Dec 03 12:59:02 crc kubenswrapper[4849]: E1203 12:59:02.858497 4849 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.155:5001/openstack-k8s-operators/openstack-operator-index:b102924657dd294d08db769acac26201e395a333\\\"\"" pod="openstack-operators/openstack-operator-index-qjkcg" podUID="d93bf9a7-2e41-4abf-9ec8-04480010f205"